/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_IMMUNE_REGION_H_
#define ART_RUNTIME_GC_COLLECTOR_IMMUNE_REGION_H_

#include "base/macros.h"
#include "base/mutex.h"

namespace art {
namespace mirror {
class Object;
}  // namespace mirror
namespace gc {
namespace space {
class ContinuousSpace;
}  // namespace space

namespace collector {

34// An immune region is a continuous region of memory for which all objects contained are assumed to
35// be marked. This is used as an optimization in the GC to avoid needing to test the mark bitmap of
36// the zygote, image spaces, and sometimes non moving spaces. Doing the ContainsObject check is
37// faster than doing a bitmap read. There is no support for discontinuous spaces and you need to be
38// careful that your immune region doesn't contain any large objects.
39class ImmuneRegion {
40 public:
41 ImmuneRegion();
42 void Reset();
43 bool AddContinuousSpace(space::ContinuousSpace* space)
Mathieu Chartier90443472015-07-16 20:32:27 -070044 REQUIRES(Locks::heap_bitmap_lock_);
Mathieu Chartier8d562102014-03-12 17:42:10 -070045 bool ContainsSpace(const space::ContinuousSpace* space) const;
46 // Returns true if an object is inside of the immune region (assumed to be marked).
47 bool ContainsObject(const mirror::Object* obj) const ALWAYS_INLINE {
Mathieu Chartier0e54cd02014-03-20 12:41:23 -070048 // Note: Relies on integer underflow behavior.
49 return reinterpret_cast<uintptr_t>(obj) - reinterpret_cast<uintptr_t>(begin_) < size_;
50 }
51 void SetBegin(mirror::Object* begin) {
52 begin_ = begin;
53 UpdateSize();
54 }
55 void SetEnd(mirror::Object* end) {
56 end_ = end;
57 UpdateSize();
Mathieu Chartier8d562102014-03-12 17:42:10 -070058 }
59
Hiroshi Yamauchi2cd334a2015-01-09 14:03:35 -080060 mirror::Object* Begin() {
61 return begin_;
62 }
63 mirror::Object* End() {
64 return end_;
65 }
66
Mathieu Chartier8d562102014-03-12 17:42:10 -070067 private:
68 bool IsEmpty() const {
Mathieu Chartier0e54cd02014-03-20 12:41:23 -070069 return size_ == 0;
70 }
71 void UpdateSize() {
72 size_ = reinterpret_cast<uintptr_t>(end_) - reinterpret_cast<uintptr_t>(begin_);
Mathieu Chartier8d562102014-03-12 17:42:10 -070073 }
74
75 mirror::Object* begin_;
76 mirror::Object* end_;
Mathieu Chartier0e54cd02014-03-20 12:41:23 -070077 uintptr_t size_;
Mathieu Chartier8d562102014-03-12 17:42:10 -070078};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_IMMUNE_REGION_H_