Mathieu Chartier | 590fee9 | 2013-09-13 13:46:47 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2013 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #ifndef ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_INL_H_ |
| 18 | #define ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_INL_H_ |
| 19 | |
| 20 | #include "bump_pointer_space.h" |
| 21 | |
| 22 | namespace art { |
| 23 | namespace gc { |
| 24 | namespace space { |
| 25 | |
Ian Rogers | 6fac447 | 2014-02-25 17:01:10 -0800 | [diff] [blame] | 26 | inline mirror::Object* BumpPointerSpace::Alloc(Thread*, size_t num_bytes, size_t* bytes_allocated, |
| 27 | size_t* usable_size) { |
| 28 | num_bytes = RoundUp(num_bytes, kAlignment); |
| 29 | mirror::Object* ret = AllocNonvirtual(num_bytes); |
| 30 | if (LIKELY(ret != nullptr)) { |
| 31 | *bytes_allocated = num_bytes; |
| 32 | if (usable_size != nullptr) { |
| 33 | *usable_size = num_bytes; |
| 34 | } |
| 35 | } |
| 36 | return ret; |
| 37 | } |
| 38 | |
// Non-atomic (thread-unsafe) fast-path allocation. Only legal while the
// caller holds the mutator lock exclusively — asserted below — so no other
// thread can race on end_ or the accounting counters; relaxed atomics
// suffice because exclusivity rules out concurrent access.
// On success, writes the rounded-up size to *bytes_allocated and, if
// requested, to *usable_size; returns nullptr when the region is full.
inline mirror::Object* BumpPointerSpace::AllocThreadUnsafe(Thread* self, size_t num_bytes,
                                                           size_t* bytes_allocated,
                                                           size_t* usable_size) {
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  // Round up to the space's alignment granularity.
  num_bytes = RoundUp(num_bytes, kAlignment);
  byte* end = end_.LoadRelaxed();
  if (end + num_bytes > growth_end_) {
    // Not enough room left in this region.
    return nullptr;
  }
  // The new object lives at the current end; bump the pointer past it.
  mirror::Object* obj = reinterpret_cast<mirror::Object*>(end);
  end_.StoreRelaxed(end + num_bytes);
  *bytes_allocated = num_bytes;
  // Use the CAS free versions as an optimization.
  objects_allocated_.StoreRelaxed(objects_allocated_.LoadRelaxed() + 1);
  bytes_allocated_.StoreRelaxed(bytes_allocated_.LoadRelaxed() + num_bytes);
  if (UNLIKELY(usable_size != nullptr)) {
    *usable_size = num_bytes;
  }
  return obj;
}
| 59 | |
// Lock-free bump allocation: atomically advance end_ by num_bytes with a
// weak CAS, retrying if another thread advanced it first. Does NOT update
// objects_allocated_/bytes_allocated_ — callers that need accounting go
// through AllocNonvirtual. num_bytes must already be kAlignment-aligned.
// Returns the old end (start of the new object), or nullptr if the region
// cannot hold num_bytes more.
inline mirror::Object* BumpPointerSpace::AllocNonvirtualWithoutAccounting(size_t num_bytes) {
  DCHECK(IsAligned<kAlignment>(num_bytes));
  byte* old_end;
  byte* new_end;
  do {
    old_end = end_.LoadRelaxed();
    new_end = old_end + num_bytes;
    // If there is no more room in the region, we are out of memory.
    if (UNLIKELY(new_end > growth_end_)) {
      return nullptr;
    }
    // Weak CAS may fail spuriously; the loop simply retries with a fresh end_.
  } while (!end_.CompareExchangeWeakSequentiallyConsistent(old_end, new_end));
  return reinterpret_cast<mirror::Object*>(old_end);
}
| 74 | |
Mathieu Chartier | 692fafd | 2013-11-29 17:24:40 -0800 | [diff] [blame] | 75 | inline mirror::Object* BumpPointerSpace::AllocNonvirtual(size_t num_bytes) { |
| 76 | mirror::Object* ret = AllocNonvirtualWithoutAccounting(num_bytes); |
| 77 | if (ret != nullptr) { |
Ian Rogers | 3e5cf30 | 2014-05-20 16:40:37 -0700 | [diff] [blame] | 78 | objects_allocated_.FetchAndAddSequentiallyConsistent(1); |
| 79 | bytes_allocated_.FetchAndAddSequentiallyConsistent(num_bytes); |
Mathieu Chartier | 692fafd | 2013-11-29 17:24:40 -0800 | [diff] [blame] | 80 | } |
| 81 | return ret; |
| 82 | } |
| 83 | |
Ian Rogers | 6fac447 | 2014-02-25 17:01:10 -0800 | [diff] [blame] | 84 | inline size_t BumpPointerSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size) |
| 85 | SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
| 86 | size_t num_bytes = obj->SizeOf(); |
| 87 | if (usable_size != nullptr) { |
| 88 | *usable_size = RoundUp(num_bytes, kAlignment); |
| 89 | } |
| 90 | return num_bytes; |
| 91 | } |
| 92 | |
Mathieu Chartier | 590fee9 | 2013-09-13 13:46:47 -0700 | [diff] [blame] | 93 | } // namespace space |
| 94 | } // namespace gc |
| 95 | } // namespace art |
| 96 | |
| 97 | #endif // ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_INL_H_ |