/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
| 16 | |
| 17 | #ifndef ART_RUNTIME_READ_BARRIER_H_ |
| 18 | #define ART_RUNTIME_READ_BARRIER_H_ |
| 19 | |
Andreas Gampe | aea05c1 | 2017-05-19 08:45:02 -0700 | [diff] [blame] | 20 | #include "base/logging.h" |
Hiroshi Yamauchi | 800ac2d | 2014-04-02 17:32:54 -0700 | [diff] [blame] | 21 | #include "base/macros.h" |
Andreas Gampe | 8cf9cb3 | 2017-07-19 09:28:38 -0700 | [diff] [blame] | 22 | #include "base/mutex.h" |
Hiroshi Yamauchi | 3f64f25 | 2015-06-12 18:35:06 -0700 | [diff] [blame] | 23 | #include "gc_root.h" |
Hiroshi Yamauchi | 2cd334a | 2015-01-09 14:03:35 -0800 | [diff] [blame] | 24 | #include "jni.h" |
Mathieu Chartier | bb87e0f | 2015-04-03 11:21:55 -0700 | [diff] [blame] | 25 | #include "mirror/object_reference.h" |
Hiroshi Yamauchi | 800ac2d | 2014-04-02 17:32:54 -0700 | [diff] [blame] | 26 | #include "offsets.h" |
| 27 | #include "read_barrier_c.h" |
Hiroshi Yamauchi | 624468c | 2014-03-31 15:14:47 -0700 | [diff] [blame] | 28 | |
// This is a C++ (not C) header file, separate from read_barrier_c.h,
// which needs to be a C header file for asm_support.h.
Hiroshi Yamauchi | 624468c | 2014-03-31 15:14:47 -0700 | [diff] [blame] | 31 | |
Hiroshi Yamauchi | 800ac2d | 2014-04-02 17:32:54 -0700 | [diff] [blame] | 32 | namespace art { |
| 33 | namespace mirror { |
| 34 | class Object; |
| 35 | template<typename MirrorType> class HeapReference; |
| 36 | } // namespace mirror |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 37 | class ArtMethod; |
Hiroshi Yamauchi | 624468c | 2014-03-31 15:14:47 -0700 | [diff] [blame] | 38 | |
// Static entry points for performing read barriers on heap reference fields and GC roots,
// plus helpers for asserting the to-space invariant and for querying an object's read
// barrier state (white/gray). All methods are static; the class is never instantiated.
class ReadBarrier {
 public:
  // Enable the to-space invariant checks. This is slow and happens very often. Do not enable in
  // fast-debug environment.
  DECLARE_RUNTIME_DEBUG_FLAG(kEnableToSpaceInvariantChecks);

  // Enable the read barrier checks. This is slow and happens very often. Do not enable in
  // fast-debug environment.
  DECLARE_RUNTIME_DEBUG_FLAG(kEnableReadBarrierInvariantChecks);

  // Perform a read barrier on the reference field of `obj` at `offset`, whose slot is
  // `ref_addr`, and return the (possibly updated) referent.
  // It's up to the implementation whether the given field gets updated whereas the return value
  // must be an updated reference, unless kAlwaysUpdateField is true, in which case the field
  // itself must also be updated.
  template <typename MirrorType, ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
            bool kAlwaysUpdateField = false>
  ALWAYS_INLINE static MirrorType* Barrier(
      mirror::Object* obj, MemberOffset offset, mirror::HeapReference<MirrorType>* ref_addr)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Perform a read barrier on a GC root stored as a raw mirror pointer. `gc_root_source`, if
  // non-null, identifies where the root came from (used for diagnostics/assertions).
  // It's up to the implementation whether the given root gets updated
  // whereas the return value must be an updated reference.
  template <typename MirrorType, ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE static MirrorType* BarrierForRoot(MirrorType** root,
                                                  GcRootSource* gc_root_source = nullptr)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Same as above, but for a GC root stored as a compressed reference.
  // It's up to the implementation whether the given root gets updated
  // whereas the return value must be an updated reference.
  template <typename MirrorType, ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE static MirrorType* BarrierForRoot(mirror::CompressedReference<MirrorType>* root,
                                                  GcRootSource* gc_root_source = nullptr)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return the mirror Object if it is marked, or null if not.
  template <typename MirrorType>
  ALWAYS_INLINE static MirrorType* IsMarked(MirrorType* ref)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Whether the runtime is still in its startup phase (implementation elsewhere; used by the
  // barrier paths above).
  static bool IsDuringStartup();

  // Assert the to-space invariant for `ref`, without the holder object.
  // Delegates to the holder-object overload with a null holder and a zero offset.
  static void AssertToSpaceInvariant(mirror::Object* ref)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    AssertToSpaceInvariant(nullptr, MemberOffset(0), ref);
  }
  // With the holder object: `ref` was loaded from the field of `obj` at `offset`.
  static void AssertToSpaceInvariant(mirror::Object* obj, MemberOffset offset,
                                     mirror::Object* ref)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // With GcRootSource: `ref` was loaded from the described GC root.
  static void AssertToSpaceInvariant(GcRootSource* gc_root_source, mirror::Object* ref)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Mark `obj` and return the (possibly relocated) marked object.
  // ALWAYS_INLINE on this caused a performance regression b/26744236.
  static mirror::Object* Mark(mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_);

  // The read barrier state for an unmarked object.
  static constexpr uint32_t WhiteState() {
    return white_state_;
  }
  // The read barrier state for an object that is marked but not yet marked through.
  static constexpr uint32_t GrayState() {
    return gray_state_;
  }

  // Return whether `obj` is gray.
  // *fake_address_dependency will be set to 0; it should be bitwise-or'ed with the address of
  // the subsequent load to create an artificial address dependency, preventing the reordering
  // of the read barrier bit load and the subsequent object reference load (from one of `obj`'s
  // fields).
  ALWAYS_INLINE static bool IsGray(mirror::Object* obj, uintptr_t* fake_address_dependency)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return whether `obj` is gray.
  // This uses a load-acquire to load the read barrier bit internally to prevent the reordering of
  // the read barrier bit load and the subsequent load.
  ALWAYS_INLINE static bool IsGray(mirror::Object* obj)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // True iff `rb_state` is one of the two defined read barrier states (white or gray).
  static bool IsValidReadBarrierState(uint32_t rb_state) {
    return rb_state == white_state_ || rb_state == gray_state_;
  }

  static constexpr uint32_t white_state_ = 0x0;    // Not marked.
  static constexpr uint32_t gray_state_ = 0x1;     // Marked, but not marked through. On mark stack.
  static constexpr uint32_t rb_state_mask_ = 0x1;  // The low bits for white|gray.
};
Hiroshi Yamauchi | 624468c | 2014-03-31 15:14:47 -0700 | [diff] [blame] | 121 | |
Hiroshi Yamauchi | 800ac2d | 2014-04-02 17:32:54 -0700 | [diff] [blame] | 122 | } // namespace art |
Hiroshi Yamauchi | 624468c | 2014-03-31 15:14:47 -0700 | [diff] [blame] | 123 | |
| 124 | #endif // ART_RUNTIME_READ_BARRIER_H_ |