/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "common_runtime_test.h"
#include "mirror/art_field-inl.h"
#include "mirror/string-inl.h"

#include <cstdio>

namespace art {


class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(type), type);
        }
      }
    }
  }

  void SetUpRuntimeOptions(Runtime::Options *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
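  // Invoke a stub with up to three word-sized arguments: pushes a managed-stack fragment so the
  // transition out of managed code stays walkable, moves the arguments into the registers the
  // stub expects on the target ISA, calls the stub, and returns its raw return value.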
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushl $0\n\t"              // Push nullptr to terminate quick stack
        "call *%%edi\n\t"           // Call the stub
        "addl $4, %%esp"            // Pop nullptr
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : );  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, #0\n\t"
        "str r9, [sp, #-8]!\n\t"    // Push nullptr to terminate stack, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self)
        : );  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        "sub sp, sp, #48\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 48\n\t"
        "stp xzr, x1, [sp]\n\t"        // nullptr (end of quick stack), x1
        "stp x2, x3, [sp, #16]\n\t"    // Save x2, x3
        "stp x18, x30, [sp, #32]\n\t"  // Save x18 (xSELF), xLR

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #8]\n\t"
        "str %[arg2], [sp, #16]\n\t"
        "str %[code], [sp, #24]\n\t"
        "str %[self], [sp, #32]\n\t"
        "ldr x0, [sp]\n\t"
        "ldr x1, [sp, #8]\n\t"
        "ldr x2, [sp, #16]\n\t"
        "ldr x3, [sp, #24]\n\t"
        "ldr x18, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"

        "blr x3\n\t"                   // Call the stub
        "ldp x1, x2, [sp, #8]\n\t"     // Restore x1, x2
        "ldp x3, x18, [sp, #24]\n\t"   // Restore x3, xSELF
        "ldr x30, [sp, #40]\n\t"       // Restore xLR
        "add sp, sp, #48\n\t"          // Free stack space
        ".cfi_adjust_cfa_offset -48\n\t"

        "mov %[result], x0\n\t"        // Save the result
        : [result] "=r" (result)
          // Use the result from x0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self)
        : "x4", "x5", "x6", "x7", "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16",
          "x17");  // clobber.
#elif defined(__x86_64__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq $0\n\t"                 // Push nullptr to terminate quick stack
        "pushq $0\n\t"                 // 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);
    return result;
  }

  // TODO: Set up a frame according to referrer's specs.
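  // Like Invoke3, but plants the referrer ArtMethod* in the slot that Invoke3 fills with the
  // quick-stack-terminating nullptr, so stubs that resolve fields or methods through the
  // calling method can be exercised.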
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             mirror::ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $4, %%esp"            // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : );  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\t"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop referrer and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : );  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        "sub sp, sp, #48\n\t"            // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 48\n\t"
        "stp %[referrer], x1, [sp]\n\t"  // referrer, x1
        "stp x2, x3, [sp, #16]\n\t"      // Save x2, x3
        "stp x18, x30, [sp, #32]\n\t"    // Save x18 (xSELF), xLR

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #8]\n\t"
        "str %[arg2], [sp, #16]\n\t"
        "str %[code], [sp, #24]\n\t"
        "str %[self], [sp, #32]\n\t"
        "ldr x0, [sp]\n\t"
        "ldr x1, [sp, #8]\n\t"
        "ldr x2, [sp, #16]\n\t"
        "ldr x3, [sp, #24]\n\t"
        "ldr x18, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"

        "blr x3\n\t"                     // Call the stub
        "ldp x1, x2, [sp, #8]\n\t"       // Restore x1, x2
        "ldp x3, x18, [sp, #24]\n\t"     // Restore x3, xSELF
        "ldr x30, [sp, #40]\n\t"         // Restore xLR
        "add sp, sp, #48\n\t"            // Free stack space
        ".cfi_adjust_cfa_offset -48\n\t"

        "mov %[result], x0\n\t"          // Save the result
        : [result] "=r" (result)
          // Use the result from x0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "x4", "x5", "x6", "x7", "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16",
          "x17");  // clobber.
#elif defined(__x86_64__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop referrer and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);
    return result;
  }

  // Method with 32b arg0, 64b arg1
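  // On 32-bit targets a 64-bit argument has to travel as two word-sized arguments, so arg1 is
  // split into its lower and upper halves; 64-bit targets pass it through unchanged.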
  size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
                              mirror::ArtMethod* referrer) {
#if defined(__x86_64__) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
#else
    // Need to split up arguments.
    uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
    uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);

    return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
#endif
  }

  // Method with 32b arg0, 32b arg1, 64b arg2
  size_t Invoke3UUWithReferrer(uint32_t arg0, uint32_t arg1, uint64_t arg2, uintptr_t code,
                               Thread* self, mirror::ArtMethod* referrer) {
#if defined(__x86_64__) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, referrer);
#else
    // TODO: Needs 4-param invoke.
    return 0;
#endif
  }
};


#if defined(__i386__) || defined(__x86_64__)
extern "C" void art_quick_memcpy(void);
#endif

TEST_F(StubTest, Memcpy) {
#if defined(__i386__) || defined(__x86_64__)
  Thread* self = Thread::Current();

  uint32_t orig[20];
  uint32_t trg[20];
  for (size_t i = 0; i < 20; ++i) {
    orig[i] = i;
    trg[i] = 0;
  }

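  // Copy ten words into the middle of trg and check that the words outside the copied region
  // [4, 14) still differ from orig (trg[0] compares equal since both start as zero).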
  Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
          10 * sizeof(uint32_t), reinterpret_cast<uintptr_t>(&art_quick_memcpy), self);

  EXPECT_EQ(orig[0], trg[0]);

  for (size_t i = 1; i < 4; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  for (size_t i = 4; i < 14; ++i) {
    EXPECT_EQ(orig[i], trg[i]);
  }

  for (size_t i = 14; i < 20; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  // TODO: Test overlapping?

#else
  LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_lock_object(void);
#endif

TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
            reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
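  // The lock word can hold either a thin lock or the identity hash code, but not both: once the
  // hash has been computed, locking has to inflate the lock into a full Monitor.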
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U,
          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


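// Deterministic pseudo-random generator (a small linear-congruential variant), so the stress
// tests below are reproducible from their seed.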
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  uint32_t next() {
    val_ = val_ * 48271 % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;
};


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_lock_object(void);
extern "C" void art_quick_unlock_object(void);
#endif

// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects = number of locks
  StackHandleScope<kNumberOfLocks + 1> hs(self);
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.
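  // counts[] models the expected recursive lock depth of each object and fat[] whether its lock
  // has been inflated, so after every step the observed lock word can be checked against the
  // model.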

  RandGen r(0x1234);

  constexpr size_t kIterations = 10000;  // Number of iterations
  constexpr size_t kMoveToFat = 1000;    // Chance of 1:kMoveToFat to make a lock fat.

  size_t counts[kNumberOfLocks];
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        lock = true;
      } else if (counts[index] == kThinLockLoops) {
        lock = false;
      } else {
        // Randomly.
        lock = r.next() % 2 == 0;
      }

      if (lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                    reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
      count--;
    }

    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, UnlockObject) {
  TestUnlockObject(this);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_check_cast(void);
#endif

TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  Thread* self = Thread::Current();
  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_aput_obj_with_null_and_bound_check(void);
// Do not check non-checked ones, we'd need handlers and stuff...
#endif

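// The checked aput stub is expected to null-check the array, bounds-check the index, and
// type-check the stored reference; each case below targets one of those checks.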
TEST_F(StubTest, APutObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a string array of size 10
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3]

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index >= length

  Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[])

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, AllocObject) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),  // type_idx
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0)),  // arbitrary
                            0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
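  // Strategy: cap the ideal footprint, fill the heap with ever-smaller object arrays, then with
  // single objects, and only then expect the allocation stub to throw OOME.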
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, AllocObjectArray) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  if (false) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),  // type_idx
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)),  // arbitrary
                            10U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArray),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 10U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr),
                            GB,  // that should fail...
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_string_compareto(void);
#endif

TEST_F(StubTest, StringCompareTo) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
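  // The second loop below fakes such offsets by rewriting the count and offset fields of the
  // second half of the strings, so the stub's handling of non-zero offsets is exercised.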
  static constexpr size_t kBaseStringCount = 7;
  const char* c[kBaseStringCount] = { "", "", "a", "aa", "ab", "aac", "aac" };

  static constexpr size_t kStringCount = 2 * kBaseStringCount;

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kBaseStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  RandGen r(0x1234);

  for (size_t i = kBaseStringCount; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i - kBaseStringCount]));
    int32_t length = s[i]->GetLength();
    if (length > 1) {
      // Set a random offset and length.
      int32_t new_offset = 1 + (r.next() % (length - 1));
      int32_t rest = length - new_offset - 1;
      int32_t new_length = 1 + (rest > 0 ? r.next() % rest : 0);

      s[i]->SetField32<false>(mirror::String::CountOffset(), new_length);
      s[i]->SetField32<false>(mirror::String::OffsetOffset(), new_offset);
    }
  }

  // TODO: wide characters

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              reinterpret_cast<uintptr_t>(&art_quick_string_compareto), self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      // Note: indexes into c are taken modulo kBaseStringCount, as c only covers the base strings.
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x % kBaseStringCount]
          << " y=" << c[y % kBaseStringCount] << " res=" << conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x % kBaseStringCount]
          << " y=" << c[y % kBaseStringCount] << " res=" << conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x % kBaseStringCount]
          << " y=" << c[y % kBaseStringCount] << " res=" << conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set32_static(void);
extern "C" void art_quick_get32_static(void);
#endif

static void GetSet32Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
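  // Covers zero, small values, a byte boundary, larger values, and an all-bits-set pattern.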

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              reinterpret_cast<uintptr_t>(&art_quick_set32_static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           reinterpret_cast<uintptr_t>(&art_quick_get32_static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set32_instance(void);
extern "C" void art_quick_get32_instance(void);
#endif

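// The instance variant cross-checks the stubs against direct ArtField accessors: a value written
// through the set stub is read back with GetInt, then a direct SetInt is read back through the
// get stub.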
static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              reinterpret_cast<uintptr_t>(&art_quick_set32_instance),
                              self,
                              referrer);

    int32_t res = f->Get()->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            reinterpret_cast<uintptr_t>(&art_quick_get32_instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1150
1151
1152#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
1153extern "C" void art_quick_set_obj_static(void);
1154extern "C" void art_quick_get_obj_static(void);
1155
static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            reinterpret_cast<uintptr_t>(&art_quick_set_obj_static),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         reinterpret_cast<uintptr_t>(&art_quick_get_obj_static),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif

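// Round-trips a static object field through the stubs with nullptr, a live
// String, and nullptr again, covering both null and non-null stores.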
static void GetSetObjStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                            mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static((*f)->GetDexFieldIndex(), str, self, referrer, test);

  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set_obj_instance(void);
extern "C" void art_quick_get_obj_instance(void);

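// Instance-field counterpart of set_and_check_static: additionally cross-checks
// the stub's result against a direct ArtField::GetObj read.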
static void set_and_check_instance(Handle<mirror::ArtField>* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
                                   StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            reinterpret_cast<uintptr_t>(&art_quick_set_obj_instance),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         reinterpret_cast<uintptr_t>(&art_quick_get_obj_instance),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  EXPECT_EQ(val, f->Get()->GetObj(trg));
}
#endif

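// Same nullptr/String/nullptr sequence as GetSetObjStatic, but against an
// instance field of obj.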
static void GetSetObjInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                              Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


// TODO: Complete these tests for 32b architectures.

#if defined(__x86_64__) || defined(__aarch64__)
extern "C" void art_quick_set64_static(void);
extern "C" void art_quick_get64_static(void);
#endif

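// Note: the values go through Invoke3UWithReferrer rather than
// Invoke3WithReferrer, presumably so a full uint64_t can be passed as a single
// argument; the body is compiled only for x86-64 and arm64 (see TODO above).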
static void GetSet64Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__x86_64__) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3UWithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                               values[i],
                               reinterpret_cast<uintptr_t>(&art_quick_set64_static),
                               self,
                               referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           reinterpret_cast<uintptr_t>(&art_quick_get64_static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__x86_64__) || defined(__aarch64__)
extern "C" void art_quick_set64_instance(void);
extern "C" void art_quick_get64_instance(void);
#endif

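// 64-bit instance variant: mirrors GetSet32Instance, with the direct checks done
// via ArtField::GetLong/SetLong; like GetSet64Static it only runs on 64-bit ISAs.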
static void GetSet64Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__x86_64__) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              reinterpret_cast<uintptr_t>(&art_quick_set64_instance),
                              self,
                              referrer);

    int64_t res = f->Get()->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            reinterpret_cast<uintptr_t>(&art_quick_get64_instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

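// Driver for the Fields* tests below: allocates an AllFields instance through
// JNI, uses the class's first direct method as the referrer for field
// resolution, then walks the static and instance field lists and dispatches to
// the Get/Set helper matching the requested primitive type.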
static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // Garbage is created during ClassLinker::Init.

  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);

  ScopedObjectAccess soa(self);
  StackHandleScope<5> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer.
  Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));

  // Play with it...

  // Static fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetSFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      FieldHelper fh(f.Get());
      Primitive::Type type = fh.GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && fh.GetTypeDescriptor()[0] != '[') {
            GetSetObjStatic(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // Instance fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetIFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      FieldHelper fh(f.Get());
      Primitive::Type type = fh.GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && fh.GetTypeDescriptor()[0] != '[') {
            GetSetObjInstance(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // TODO: Deallocate things.
}

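// Each test transitions to runnable and starts the runtime before touching
// managed objects, then drives TestFields for one primitive type.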
TEST_F(StubTest, Fields32) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimInt);
}

TEST_F(StubTest, FieldsObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimNot);
}

TEST_F(StubTest, Fields64) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimLong);
}

}  // namespace art