blob: 45e78bcc07e31613b8cdf1ae0d5d9978a228eee6 [file] [log] [blame]
Hiroshi Yamauchi624468c2014-03-31 15:14:47 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_RUNTIME_READ_BARRIER_H_
18#define ART_RUNTIME_READ_BARRIER_H_
19
Andreas Gampeaea05c12017-05-19 08:45:02 -070020#include "base/logging.h"
Hiroshi Yamauchi800ac2d2014-04-02 17:32:54 -070021#include "base/macros.h"
Andreas Gampe8cf9cb32017-07-19 09:28:38 -070022#include "base/mutex.h"
Hiroshi Yamauchi3f64f252015-06-12 18:35:06 -070023#include "gc_root.h"
Hiroshi Yamauchi2cd334a2015-01-09 14:03:35 -080024#include "jni.h"
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -070025#include "mirror/object_reference.h"
Hiroshi Yamauchi800ac2d2014-04-02 17:32:54 -070026#include "offsets.h"
Andreas Gampe217488a2017-09-18 08:34:42 -070027#include "read_barrier_config.h"
Hiroshi Yamauchi624468c2014-03-31 15:14:47 -070028
Hiroshi Yamauchi800ac2d2014-04-02 17:32:54 -070029namespace art {
30namespace mirror {
31 class Object;
32 template<typename MirrorType> class HeapReference;
33} // namespace mirror
Mathieu Chartiere401d142015-04-22 13:56:20 -070034class ArtMethod;
Hiroshi Yamauchi624468c2014-03-31 15:14:47 -070035
// Static entry points for the read barrier: loads of heap references and GC
// roots go through Barrier()/BarrierForRoot(), and the debug helpers assert
// the to-space invariant. All members are static; this class is a namespace
// for the barrier implementation and its mark-bit state constants.
class ReadBarrier {
 public:
  // Enable the to-space invariant checks. This is slow and happens very often. Do not enable in
  // fast-debug environment.
  DECLARE_RUNTIME_DEBUG_FLAG(kEnableToSpaceInvariantChecks);

  // Enable the read barrier checks. This is slow and happens very often. Do not enable in
  // fast-debug environment.
  DECLARE_RUNTIME_DEBUG_FLAG(kEnableReadBarrierInvariantChecks);

  // Return the reference at ref_addr, invoking read barrier as appropriate.
  // Ref_addr is an address within obj, at the given member offset.
  // It's up to the implementation whether the given field gets updated whereas the return value
  // must be an updated reference unless kAlwaysUpdateField is true.
  template <typename MirrorType,
            bool kIsVolatile,
            ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
            bool kAlwaysUpdateField = false>
  ALWAYS_INLINE static MirrorType* Barrier(
      mirror::Object* obj, MemberOffset offset, mirror::HeapReference<MirrorType>* ref_addr)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Read barrier for a GC root held as a raw pointer.
  // It's up to the implementation whether the given root gets updated
  // whereas the return value must be an updated reference.
  template <typename MirrorType, ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE static MirrorType* BarrierForRoot(MirrorType** root,
                                                  GcRootSource* gc_root_source = nullptr)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Read barrier for a GC root held as a compressed reference.
  // It's up to the implementation whether the given root gets updated
  // whereas the return value must be an updated reference.
  template <typename MirrorType, ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE static MirrorType* BarrierForRoot(mirror::CompressedReference<MirrorType>* root,
                                                  GcRootSource* gc_root_source = nullptr)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return the mirror Object if it is marked, or null if not.
  template <typename MirrorType>
  ALWAYS_INLINE static MirrorType* IsMarked(MirrorType* ref)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Whether the runtime is still starting up (defined out of line).
  static bool IsDuringStartup();

  // Assert the to-space invariant for `ref`, without the holder object.
  // Delegates to the holder-object overload with a null holder.
  static void AssertToSpaceInvariant(mirror::Object* ref)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    AssertToSpaceInvariant(nullptr, MemberOffset(0), ref);
  }
  // With the holder object `obj` and the field offset the reference was loaded from.
  static void AssertToSpaceInvariant(mirror::Object* obj, MemberOffset offset,
                                     mirror::Object* ref)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // With GcRootSource describing where the root came from.
  static void AssertToSpaceInvariant(GcRootSource* gc_root_source, mirror::Object* ref)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Mark `obj` and return the marked (possibly forwarded) object.
  // ALWAYS_INLINE on this caused a performance regression b/26744236.
  static mirror::Object* Mark(mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_);

  // Accessors for the read barrier state constants below.
  static constexpr uint32_t WhiteState() {
    return white_state_;
  }
  static constexpr uint32_t GrayState() {
    return gray_state_;
  }

  // Return whether obj's read barrier state is gray.
  // *fake_address_dependency is set to 0; the caller should bitwise-or it into the address of
  // the subsequent load to create an artificial address dependency, preventing the reordering
  // of the read barrier state load and the subsequent object reference load (from one of
  // `obj`'s fields).
  ALWAYS_INLINE static bool IsGray(mirror::Object* obj, uintptr_t* fake_address_dependency)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // This uses a load-acquire to load the read barrier bit internally to prevent the reordering of
  // the read barrier bit load and the subsequent load.
  ALWAYS_INLINE static bool IsGray(mirror::Object* obj)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Whether rb_state is one of the two valid states (white or gray).
  static bool IsValidReadBarrierState(uint32_t rb_state) {
    return rb_state == white_state_ || rb_state == gray_state_;
  }

  static constexpr uint32_t white_state_ = 0x0;    // Not marked.
  static constexpr uint32_t gray_state_ = 0x1;     // Marked, but not marked through. On mark stack.
  static constexpr uint32_t rb_state_mask_ = 0x1;  // The low bits for white|gray.
};
Hiroshi Yamauchi624468c2014-03-31 15:14:47 -0700122
Hiroshi Yamauchi800ac2d2014-04-02 17:32:54 -0700123} // namespace art
Hiroshi Yamauchi624468c2014-03-31 15:14:47 -0700124
125#endif // ART_RUNTIME_READ_BARRIER_H_