/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_INL_H_
#define ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_INL_H_

#include "bump_pointer_space.h"

namespace art {
namespace gc {
namespace space {
Ian Rogers6fac4472014-02-25 17:01:10 -080026inline mirror::Object* BumpPointerSpace::Alloc(Thread*, size_t num_bytes, size_t* bytes_allocated,
27 size_t* usable_size) {
28 num_bytes = RoundUp(num_bytes, kAlignment);
29 mirror::Object* ret = AllocNonvirtual(num_bytes);
30 if (LIKELY(ret != nullptr)) {
31 *bytes_allocated = num_bytes;
32 if (usable_size != nullptr) {
33 *usable_size = num_bytes;
34 }
35 }
36 return ret;
37}
38
Mathieu Chartier0651d412014-04-29 14:37:57 -070039inline mirror::Object* BumpPointerSpace::AllocThreadUnsafe(Thread* self, size_t num_bytes,
40 size_t* bytes_allocated,
41 size_t* usable_size) {
42 Locks::mutator_lock_->AssertExclusiveHeld(self);
43 num_bytes = RoundUp(num_bytes, kAlignment);
44 if (end_ + num_bytes > growth_end_) {
45 return nullptr;
46 }
47 mirror::Object* obj = reinterpret_cast<mirror::Object*>(end_);
48 end_ += num_bytes;
49 *bytes_allocated = num_bytes;
50 // Use the CAS free versions as an optimization.
51 objects_allocated_ = objects_allocated_ + 1;
52 bytes_allocated_ = bytes_allocated_ + num_bytes;
53 if (UNLIKELY(usable_size != nullptr)) {
54 *usable_size = num_bytes;
55 }
56 return obj;
57}
58
// Lock-free bump allocation without updating the accounting counters.
// Atomically advances end_ by num_bytes via a CAS retry loop; on success the
// pre-bump end_ value is the address of the new object. num_bytes must already
// be kAlignment-aligned (DCHECKed). Returns nullptr when the region is full.
inline mirror::Object* BumpPointerSpace::AllocNonvirtualWithoutAccounting(size_t num_bytes) {
  DCHECK(IsAligned<kAlignment>(num_bytes));
  byte* old_end;
  byte* new_end;
  do {
    // Re-read end_ on every iteration: another thread may have bumped it
    // between our read and the CAS below.
    old_end = end_;
    new_end = old_end + num_bytes;
    // If there is no more room in the region, we are out of memory.
    if (UNLIKELY(new_end > growth_end_)) {
      return nullptr;
    }
    // CAS publishes the new end_; failure means a concurrent allocation won,
    // so retry with the freshly observed end_.
  } while (!__sync_bool_compare_and_swap(reinterpret_cast<volatile intptr_t*>(&end_),
                                         reinterpret_cast<intptr_t>(old_end),
                                         reinterpret_cast<intptr_t>(new_end)));
  return reinterpret_cast<mirror::Object*>(old_end);
}
75
Mathieu Chartier692fafd2013-11-29 17:24:40 -080076inline mirror::Object* BumpPointerSpace::AllocNonvirtual(size_t num_bytes) {
77 mirror::Object* ret = AllocNonvirtualWithoutAccounting(num_bytes);
78 if (ret != nullptr) {
Ian Rogersb122a4b2013-11-19 18:00:50 -080079 objects_allocated_.FetchAndAdd(1);
80 bytes_allocated_.FetchAndAdd(num_bytes);
Mathieu Chartier692fafd2013-11-29 17:24:40 -080081 }
82 return ret;
83}
84
Ian Rogers6fac4472014-02-25 17:01:10 -080085inline size_t BumpPointerSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size)
86 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
87 size_t num_bytes = obj->SizeOf();
88 if (usable_size != nullptr) {
89 *usable_size = RoundUp(num_bytes, kAlignment);
90 }
91 return num_bytes;
92}
93
}  // namespace space
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_INL_H_