/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "common_runtime_test.h"
#include "mirror/art_field-inl.h"
#include "mirror/string-inl.h"

#include <cstdio>

namespace art {


class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods.
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(type), type);
        }
      }
    }
  }

  void SetUpRuntimeOptions(Runtime::Options* options) OVERRIDE {
    // Use a smaller heap.
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
  }
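  // Note: the 4 MB cap above keeps the out-of-memory paths in the allocation
  // tests below cheap to reach.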

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
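  // Invokes a quick-code stub: pushes a managed-stack fragment, marshals arg0-arg2
  // and the stub address (plus, on ARM/ARM64, Thread* self) into the registers the
  // quick ABI expects, calls the stub, and pops the fragment again.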
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushl $0\n\t"               // Push nullptr to terminate quick stack
        "call *%%edi\n\t"            // Call the stub
        "addl $4, %%esp"             // Pop nullptr
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : );  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, #0\n\t"
        "str r9, [sp, #-8]!\n\t"    // Push nullptr to terminate stack, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self)
        : );  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        "sub sp, sp, #48\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 48\n\t"
        "stp xzr, x1, [sp]\n\t"        // nullptr (end of quick stack), x1
        "stp x2, x3, [sp, #16]\n\t"    // Save x2, x3
        "stp x18, x30, [sp, #32]\n\t"  // Save x18 (xSELF), xLR

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #8]\n\t"
        "str %[arg2], [sp, #16]\n\t"
        "str %[code], [sp, #24]\n\t"
        "str %[self], [sp, #32]\n\t"
        "ldr x0, [sp]\n\t"
        "ldr x1, [sp, #8]\n\t"
        "ldr x2, [sp, #16]\n\t"
        "ldr x3, [sp, #24]\n\t"
        "ldr x18, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"

        "blr x3\n\t"                   // Call the stub
        "ldp x1, x2, [sp, #8]\n\t"     // Restore x1, x2
        "ldp x3, x18, [sp, #24]\n\t"   // Restore x3, xSELF
        "ldr x30, [sp, #40]\n\t"       // Restore xLR
        "add sp, sp, #48\n\t"          // Free stack space
        ".cfi_adjust_cfa_offset -48\n\t"

        "mov %[result], x0\n\t"        // Save the result
        : [result] "=r" (result)
          // Use the result from x0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self)
        : "x4", "x5", "x6", "x7", "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16",
          "x17");  // clobber.
#elif defined(__x86_64__)
    // Note: Uses the native convention.
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq $0\n\t"                 // Push nullptr to terminate quick stack
        "pushq $0\n\t"                 // 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);
    return result;
  }

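  // Like Invoke3, but for stubs that read the referring ArtMethod* from the bottom
  // of the quick frame: the referrer is pushed where Invoke3 pushes the null
  // terminator.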
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             mirror::ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $4, %%esp"            // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : );  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\t"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop referrer and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : );  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        "sub sp, sp, #48\n\t"            // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 48\n\t"
        "stp %[referrer], x1, [sp]\n\t"  // referrer, x1
        "stp x2, x3, [sp, #16]\n\t"      // Save x2, x3
        "stp x18, x30, [sp, #32]\n\t"    // Save x18 (xSELF), xLR

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #8]\n\t"
        "str %[arg2], [sp, #16]\n\t"
        "str %[code], [sp, #24]\n\t"
        "str %[self], [sp, #32]\n\t"
        "ldr x0, [sp]\n\t"
        "ldr x1, [sp, #8]\n\t"
        "ldr x2, [sp, #16]\n\t"
        "ldr x3, [sp, #24]\n\t"
        "ldr x18, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"

        "blr x3\n\t"                     // Call the stub
        "ldp x1, x2, [sp, #8]\n\t"       // Restore x1, x2
        "ldp x3, x18, [sp, #24]\n\t"     // Restore x3, xSELF
        "ldr x30, [sp, #40]\n\t"         // Restore xLR
        "add sp, sp, #48\n\t"            // Free stack space
        ".cfi_adjust_cfa_offset -48\n\t"

        "mov %[result], x0\n\t"          // Save the result
        : [result] "=r" (result)
          // Use the result from x0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "x4", "x5", "x6", "x7", "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16",
          "x17");  // clobber.
#elif defined(__x86_64__)
    // Note: Uses the native convention.
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop referrer and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);
    return result;
  }

  // Method with 32b arg0, 64b arg1.
  size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
                              mirror::ArtMethod* referrer) {
#if defined(__x86_64__) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
#else
    // Need to split up arguments.
    uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
    uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);

    return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
#endif
  }

  // Method with 32b arg0, 32b arg1, 64b arg2.
  size_t Invoke3UUWithReferrer(uint32_t arg0, uint32_t arg1, uint64_t arg2, uintptr_t code,
                               Thread* self, mirror::ArtMethod* referrer) {
#if defined(__x86_64__) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, referrer);
#else
    // TODO: Needs 4-param invoke.
    return 0;
#endif
  }
};


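// The art_quick_* stubs are assembly entry points with quick-code calling
// conventions; they are declared here only so their addresses can be taken, and
// are never called directly as C functions.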
#if defined(__i386__) || defined(__x86_64__)
extern "C" void art_quick_memcpy(void);
#endif

TEST_F(StubTest, Memcpy) {
#if defined(__i386__) || defined(__x86_64__)
  Thread* self = Thread::Current();

  uint32_t orig[20];
  uint32_t trg[20];
  for (size_t i = 0; i < 20; ++i) {
    orig[i] = i;
    trg[i] = 0;
  }

  Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
          10 * sizeof(uint32_t), reinterpret_cast<uintptr_t>(&art_quick_memcpy), self);

  EXPECT_EQ(orig[0], trg[0]);

  for (size_t i = 1; i < 4; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  for (size_t i = 4; i < 14; ++i) {
    EXPECT_EQ(orig[i], trg[i]);
  }

  for (size_t i = 14; i < 20; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  // TODO: Test overlapping?

#else
  LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_lock_object(void);
#endif

TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();
  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  SirtRef<mirror::String> obj(soa.Self(),
                              mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!"));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  Invoke3(reinterpret_cast<size_t>(obj.get()), 0U, 0U,
          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero.

  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.get()), 0U, 0U,
            reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

    // Check we're at lock count i.
    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  SirtRef<mirror::Object> obj2(soa.Self(), mirror::String::AllocFromModifiedUtf8(soa.Self(),
                                                                                 "hello, world!"));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.get()), 0U, 0U,
          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


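// Deterministic pseudo-random generator for the stress tests below, so failures
// are reproducible. It is a Lehmer-style multiplicative congruential generator
// (MINSTD constants 48271 and 2^31 - 1) with an extra additive term.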
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  uint32_t next() {
    val_ = val_ * 48271 % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;
};


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_lock_object(void);
extern "C" void art_quick_unlock_object(void);
#endif

// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();
  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  SirtRef<mirror::String> obj(soa.Self(),
                              mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!"));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);

  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  test->Invoke3(reinterpret_cast<size_t>(obj.get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.

  RandGen r(0x1234);

  constexpr size_t kNumberOfLocks = 10;  // Number of objects (and hence locks).
  constexpr size_t kIterations = 10000;  // Number of iterations.
  constexpr size_t kMoveToFat = 1000;    // Chance of 1:kMoveToFat to make a lock fat.

  size_t counts[kNumberOfLocks];
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  SirtRef<mirror::String>* objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = new SirtRef<mirror::String>(soa.Self(),
                                             mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      objects[index]->get()->IdentityHashCode();

      LockWord lock_iter = objects[index]->get()->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        lock = true;
      } else if (counts[index] == kThinLockLoops) {
        lock = false;
      } else {
        // Randomly.
        lock = r.next() % 2 == 0;
      }

      if (lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index]->get()), 0U, 0U,
                      reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index]->get()), 0U, 0U,
                      reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->get()->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index]->get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go reverse order to correctly handle SirtRefs.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index]->get()), 0U, 0U,
                    reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
      count--;
    }

    LockWord lock_after4 = objects[index]->get()->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);

    delete objects[index];
  }

  // Test done.
#else
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}

TEST_F(StubTest, UnlockObject) {
  TestUnlockObject(this);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_check_cast(void);
#endif

TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  Thread* self = Thread::Current();
  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  SirtRef<mirror::Class> c(soa.Self(), class_linker_->FindSystemClass(soa.Self(),
                                                                      "[Ljava/lang/Object;"));
  SirtRef<mirror::Class> c2(soa.Self(), class_linker_->FindSystemClass(soa.Self(),
                                                                       "[Ljava/lang/String;"));

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.get()), reinterpret_cast<size_t>(c.get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c2.get()), reinterpret_cast<size_t>(c2.get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.get()), reinterpret_cast<size_t>(c2.get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.
  Invoke3(reinterpret_cast<size_t>(c2.get()), reinterpret_cast<size_t>(c.get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_aput_obj_with_null_and_bound_check(void);
// Do not check non-checked ones, we'd need handlers and stuff...
#endif

TEST_F(StubTest, APutObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  Thread* self = Thread::Current();
  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  SirtRef<mirror::Class> c(soa.Self(), class_linker_->FindSystemClass(soa.Self(),
                                                                      "Ljava/lang/Object;"));
  SirtRef<mirror::Class> c2(soa.Self(), class_linker_->FindSystemClass(soa.Self(),
                                                                       "Ljava/lang/String;"));
  SirtRef<mirror::Class> ca(soa.Self(), class_linker_->FindSystemClass(soa.Self(),
                                                                       "[Ljava/lang/String;"));

  // Build a string array of size 10.
  SirtRef<mirror::ObjectArray<mirror::Object> > array(soa.Self(),
      mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.get(), 10));

  // Build a string -> should be assignable.
  SirtRef<mirror::Object> str_obj(soa.Self(),
                                  mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!"));

  // Build a generic object -> should fail assigning.
  SirtRef<mirror::Object> obj_obj(soa.Self(), c->AllocObject(soa.Self()));

  // Play with it...

  // 1) Success cases.
  // 1.1) Assign str_obj to array[0..3].

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.get()), 0U, reinterpret_cast<size_t>(str_obj.get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.get()), 1U, reinterpret_cast<size_t>(str_obj.get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.get()), 2U, reinterpret_cast<size_t>(str_obj.get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.get()), 3U, reinterpret_cast<size_t>(str_obj.get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.get(), array->Get(3));

  // 1.2) Assign null to array[0..3].

  Invoke3(reinterpret_cast<size_t>(array.get()), 0U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.get()), 1U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.get()), 2U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.get()), 3U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[]).
  // 2.1) Array = null.
  // TODO: Throwing NPE needs actual DEX code.

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0.

  Invoke3(reinterpret_cast<size_t>(array.get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index >= array length.

  Invoke3(reinterpret_cast<size_t>(array.get()), 10U, reinterpret_cast<size_t>(str_obj.get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[]).

  Invoke3(reinterpret_cast<size_t>(array.get()), 0U, reinterpret_cast<size_t>(obj_obj.get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

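// Exercises the three object-allocation entrypoints: pAllocObject (dex type index
// plus referrer method), pAllocObjectResolved (resolved Class*), and
// pAllocObjectInitialized (initialized Class*), plus the out-of-memory path.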
TEST_F(StubTest, AllocObject) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  // TODO: Check the "Unresolved" allocation stubs.

  Thread* self = Thread::Current();
  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  SirtRef<mirror::Class> c(soa.Self(), class_linker_->FindSystemClass(soa.Self(),
                                                                      "Ljava/lang/Object;"));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Use an arbitrary method from c to use as referrer.
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0)),  // arbitrary
                            0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases).
    size_t result = Invoke3(reinterpret_cast<size_t>(c.get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases).
    size_t result = Invoke3(reinterpret_cast<size_t>(c.get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    SirtRef<mirror::Class> ca(soa.Self(), class_linker_->FindSystemClass(soa.Self(),
                                                                         "[Ljava/lang/Object;"));
    std::vector<SirtRef<mirror::Object>*> sirt_refs;
    // Start allocating with 128K.
    size_t length = 128 * KB / 4;
    while (length > 10) {
      SirtRef<mirror::Object>* ref = new SirtRef<mirror::Object>(soa.Self(),
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.get(), length / 4));
      if (self->IsExceptionPending() || ref->get() == nullptr) {
        self->ClearException();
        delete ref;

        // Try a smaller length.
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        sirt_refs.push_back(ref);
      }
    }
    LOG(INFO) << "Used " << sirt_refs.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      SirtRef<mirror::Object>* ref = new SirtRef<mirror::Object>(soa.Self(),
                                                                 c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && ref->get() != nullptr) {
        sirt_refs.push_back(ref);
      } else {
        delete ref;
      }
    }
    self->ClearException();

    size_t result = Invoke3(reinterpret_cast<size_t>(c.get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);

    // Release all the allocated objects.
    // Need to go backward to release SirtRefs in the right order.
    auto it = sirt_refs.rbegin();
    auto end = sirt_refs.rend();
    for (; it != end; ++it) {
      delete *it;
    }
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}

TEST_F(StubTest, AllocObjectArray) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  // TODO: Check the "Unresolved" allocation stubs.

  Thread* self = Thread::Current();
  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  SirtRef<mirror::Class> c(soa.Self(), class_linker_->FindSystemClass(soa.Self(),
                                                                      "[Ljava/lang/Object;"));

  // Needed to have a linked method.
  SirtRef<mirror::Class> c_obj(soa.Self(), class_linker_->FindSystemClass(soa.Self(),
                                                                          "Ljava/lang/Object;"));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...
  if (false) {
    // Use an arbitrary method from c to use as referrer.
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)),  // arbitrary
                            10U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArray),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases).
    size_t result = Invoke3(reinterpret_cast<size_t>(c.get()), reinterpret_cast<size_t>(nullptr), 10U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.get()), reinterpret_cast<size_t>(nullptr),
                            GB,  // that should fail...
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_string_compareto(void);
#endif

TEST_F(StubTest, StringCompareTo) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  // TODO: Check the "Unresolved" allocation stubs.

  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings.
  // Use array so we can index into it and use a matrix for expected results.
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  constexpr size_t base_string_count = 7;
  const char* c[base_string_count] = { "", "", "a", "aa", "ab", "aac", "aac", };

  constexpr size_t string_count = 2 * base_string_count;

  SirtRef<mirror::String>* s[string_count];

  for (size_t i = 0; i < base_string_count; ++i) {
    s[i] = new SirtRef<mirror::String>(soa.Self(),
                                       mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

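  // Give the second half of the strings a random non-zero offset and a shortened
  // count, so they emulate substrings that share a backing char array.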
  RandGen r(0x1234);

  for (size_t i = base_string_count; i < string_count; ++i) {
    s[i] = new SirtRef<mirror::String>(soa.Self(),
                                       mirror::String::AllocFromModifiedUtf8(soa.Self(),
                                                                             c[i - base_string_count]));
    int32_t length = s[i]->get()->GetLength();
    if (length > 1) {
      // Set a random offset and length.
      int32_t new_offset = 1 + (r.next() % (length - 1));
      int32_t rest = length - new_offset - 1;
      int32_t new_length = 1 + (rest > 0 ? r.next() % rest : 0);

      s[i]->get()->SetField32<false>(mirror::String::CountOffset(), new_length);
      s[i]->get()->SetField32<false>(mirror::String::OffsetOffset(), new_offset);
    }
  }

  // TODO: wide characters.

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  int32_t expected[string_count][string_count];
  for (size_t x = 0; x < string_count; ++x) {
    for (size_t y = 0; y < string_count; ++y) {
      expected[x][y] = s[x]->get()->CompareTo(s[y]->get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < string_count; ++x) {
    for (size_t y = 0; y < string_count; ++y) {
      // Test string_compareto x y.
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x]->get()),
                              reinterpret_cast<size_t>(s[y]->get()), 0U,
                              reinterpret_cast<uintptr_t>(&art_quick_string_compareto), self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer.
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      // NB: index c with modulo so the failure message never reads past the base strings.
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x % base_string_count] << " y="
          << c[y % base_string_count] << " res=" << conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x % base_string_count] << " y="
          << c[y % base_string_count] << " res=" << conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x % base_string_count] << " y="
          << c[y % base_string_count] << " res=" << conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set32_static(void);
extern "C" void art_quick_get32_static(void);
#endif

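// Driver for the 32-bit static field stubs: writes each test value through
// art_quick_set32_static, reads it back through art_quick_get32_static, and
// expects the round trip to preserve the value.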
static void GetSet32Static(SirtRef<mirror::Object>* obj, SirtRef<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              reinterpret_cast<uintptr_t>(&art_quick_set32_static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           reinterpret_cast<uintptr_t>(&art_quick_get32_static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set32_instance(void);
extern "C" void art_quick_get32_instance(void);
#endif

static void GetSet32Instance(SirtRef<mirror::Object>* obj, SirtRef<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->get()),
                              static_cast<size_t>(values[i]),
                              reinterpret_cast<uintptr_t>(&art_quick_set32_instance),
                              self,
                              referrer);

    int32_t res = f->get()->GetInt(obj->get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    res++;
    f->get()->SetInt<false>(obj->get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->get()),
                                            0U,
                                            reinterpret_cast<uintptr_t>(&art_quick_get32_instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set_obj_static(void);
extern "C" void art_quick_get_obj_static(void);

static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            reinterpret_cast<uintptr_t>(&art_quick_set_obj_static),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         reinterpret_cast<uintptr_t>(&art_quick_get_obj_static),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif

static void GetSetObjStatic(SirtRef<mirror::Object>* obj, SirtRef<mirror::ArtField>* f, Thread* self,
                            mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static((*f)->GetDexFieldIndex(), str, self, referrer, test);

  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set_obj_instance(void);
extern "C" void art_quick_get_obj_instance(void);

static void set_and_check_instance(SirtRef<mirror::ArtField>* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
                                   StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            reinterpret_cast<uintptr_t>(&art_quick_set_obj_instance),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         reinterpret_cast<uintptr_t>(&art_quick_get_obj_instance),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  EXPECT_EQ(val, f->get()->GetObj(trg));
}
#endif

static void GetSetObjInstance(SirtRef<mirror::Object>* obj, SirtRef<mirror::ArtField>* f,
                              Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  set_and_check_instance(f, obj->get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->get(), str, self, referrer, test);

  set_and_check_instance(f, obj->get(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


// TODO: Complete these tests for 32b architectures.

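// Note: on 32-bit targets a 64-bit value would need the missing 4-argument invoke
// helper (see Invoke3UUWithReferrer above), so the 64-bit accessors are only
// exercised on x86-64 and arm64.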
#if defined(__x86_64__) || defined(__aarch64__)
extern "C" void art_quick_set64_static(void);
extern "C" void art_quick_get64_static(void);
#endif

static void GetSet64Static(SirtRef<mirror::Object>* obj, SirtRef<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__x86_64__) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3UWithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                               values[i],
                               reinterpret_cast<uintptr_t>(&art_quick_set64_static),
                               self,
                               referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           reinterpret_cast<uintptr_t>(&art_quick_get64_static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


#if defined(__x86_64__) || defined(__aarch64__)
extern "C" void art_quick_set64_instance(void);
extern "C" void art_quick_get64_instance(void);
#endif

static void GetSet64Instance(SirtRef<mirror::Object>* obj, SirtRef<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__x86_64__) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->get()),
                              static_cast<size_t>(values[i]),
                              reinterpret_cast<uintptr_t>(&art_quick_set64_instance),
                              self,
                              referrer);

    int64_t res = f->get()->GetLong(obj->get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    res++;
    f->get()->SetLong<false>(obj->get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->get()),
                                            0U,
                                            reinterpret_cast<uintptr_t>(&art_quick_get64_instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}

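// Loads the AllFields test class and drives the matching get/set helpers above
// over each static and instance field of the requested primitive type.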
static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // garbage is created during ClassLinker::Init

  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != NULL);
  jobject o = env->AllocObject(jc);
  CHECK(o != NULL);

  ScopedObjectAccess soa(self);
  SirtRef<mirror::Object> obj(self, soa.Decode<mirror::Object*>(o));

  SirtRef<mirror::Class> c(self, obj->GetClass());

  // Need a method as a referrer.
  SirtRef<mirror::ArtMethod> m(self, c->GetDirectMethod(0));

  // Play with it...

  // Static fields.
  {
    SirtRef<mirror::ObjectArray<mirror::ArtField>> fields(self, c.get()->GetSFields());
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      SirtRef<mirror::ArtField> f(self, fields->Get(i));

      FieldHelper fh(f.get());
      Primitive::Type type = fh.GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Static(&obj, &f, self, m.get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Static(&obj, &f, self, m.get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && fh.GetTypeDescriptor()[0] != '[') {
            GetSetObjStatic(&obj, &f, self, m.get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // Instance fields.
  {
    SirtRef<mirror::ObjectArray<mirror::ArtField>> fields(self, c.get()->GetIFields());
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      SirtRef<mirror::ArtField> f(self, fields->Get(i));

      FieldHelper fh(f.get());
      Primitive::Type type = fh.GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Instance(&obj, &f, self, m.get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Instance(&obj, &f, self, m.get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && fh.GetTypeDescriptor()[0] != '[') {
            GetSetObjInstance(&obj, &f, self, m.get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // TODO: Deallocate things.
}


TEST_F(StubTest, Fields32) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimInt);
}

TEST_F(StubTest, FieldsObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimNot);
}

TEST_F(StubTest, Fields64) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimLong);
}

}  // namespace art