blob: 0c0a89b253fd0a75cadfdfb2835a0a86ab33d487 [file] [log] [blame]
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#ifndef ART_RUNTIME_GC_COLLECTOR_IMMUNE_REGION_H_
18#define ART_RUNTIME_GC_COLLECTOR_IMMUNE_REGION_H_
19
20#include "base/macros.h"
21#include "base/mutex.h"
22#include "gc/space/space-inl.h"
23
24namespace art {
25namespace mirror {
26class Object;
27} // namespace mirror
28namespace gc {
29namespace space {
30class ContinuousSpace;
31} // namespace space
32
33namespace collector {
34
35// An immune region is a continuous region of memory for which all objects contained are assumed to
36// be marked. This is used as an optimization in the GC to avoid needing to test the mark bitmap of
37// the zygote, image spaces, and sometimes non moving spaces. Doing the ContainsObject check is
38// faster than doing a bitmap read. There is no support for discontinuous spaces and you need to be
39// careful that your immune region doesn't contain any large objects.
40class ImmuneRegion {
41 public:
42 ImmuneRegion();
43 void Reset();
44 bool AddContinuousSpace(space::ContinuousSpace* space)
45 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
46 bool ContainsSpace(const space::ContinuousSpace* space) const;
47 // Returns true if an object is inside of the immune region (assumed to be marked).
48 bool ContainsObject(const mirror::Object* obj) const ALWAYS_INLINE {
Mathieu Chartier0e54cd02014-03-20 12:41:23 -070049 // Note: Relies on integer underflow behavior.
50 return reinterpret_cast<uintptr_t>(obj) - reinterpret_cast<uintptr_t>(begin_) < size_;
51 }
52 void SetBegin(mirror::Object* begin) {
53 begin_ = begin;
54 UpdateSize();
55 }
56 void SetEnd(mirror::Object* end) {
57 end_ = end;
58 UpdateSize();
Mathieu Chartier8d562102014-03-12 17:42:10 -070059 }
60
61 private:
62 bool IsEmpty() const {
Mathieu Chartier0e54cd02014-03-20 12:41:23 -070063 return size_ == 0;
64 }
65 void UpdateSize() {
66 size_ = reinterpret_cast<uintptr_t>(end_) - reinterpret_cast<uintptr_t>(begin_);
Mathieu Chartier8d562102014-03-12 17:42:10 -070067 }
68
69 mirror::Object* begin_;
70 mirror::Object* end_;
Mathieu Chartier0e54cd02014-03-20 12:41:23 -070071 uintptr_t size_;
Mathieu Chartier8d562102014-03-12 17:42:10 -070072};
73
74} // namespace collector
75} // namespace gc
76} // namespace art
77
78#endif // ART_RUNTIME_GC_COLLECTOR_IMMUNE_REGION_H_