/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_SPACE_REGION_SPACE_INL_H_
#define ART_RUNTIME_GC_SPACE_REGION_SPACE_INL_H_

#include "region_space.h"
#include "thread-current-inl.h"

namespace art {
namespace gc {
namespace space {

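// A thin wrapper over the non-virtual fast path: round the request up to
// kAlignment and allocate from the current (non-evacuation) region. The
// Thread* parameter is intentionally unnamed: the bump pointer lives in
// shared region state and is advanced with atomic operations (see
// Region::Alloc below), so no per-thread state is needed here.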
inline mirror::Object* RegionSpace::Alloc(Thread*, size_t num_bytes, size_t* bytes_allocated,
                                          size_t* usable_size,
                                          size_t* bytes_tl_bulk_allocated) {
  num_bytes = RoundUp(num_bytes, kAlignment);
  return AllocNonvirtual<false>(num_bytes, bytes_allocated, usable_size,
                                bytes_tl_bulk_allocated);
}

inline mirror::Object* RegionSpace::AllocThreadUnsafe(Thread* self, size_t num_bytes,
                                                      size_t* bytes_allocated,
                                                      size_t* usable_size,
                                                      size_t* bytes_tl_bulk_allocated) {
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  return Alloc(self, num_bytes, bytes_allocated, usable_size, bytes_tl_bulk_allocated);
}

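// Allocation fast/slow path for non-large objects:
//  1. Bump-pointer allocate from the current (or evacuation) region without
//     taking region_lock_; Region::Alloc is lock-free.
//  2. On failure, take region_lock_ and retry, since another thread may have
//     installed a fresh region in the meantime.
//  3. If that also fails, allocate a new region, carve our object out of it,
//     and only then publish it as the current/evac region.
// Requests larger than kRegionSize take the AllocLarge path instead.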
template<bool kForEvac>
inline mirror::Object* RegionSpace::AllocNonvirtual(size_t num_bytes, size_t* bytes_allocated,
                                                    size_t* usable_size,
                                                    size_t* bytes_tl_bulk_allocated) {
  DCHECK_ALIGNED(num_bytes, kAlignment);
  mirror::Object* obj;
  if (LIKELY(num_bytes <= kRegionSize)) {
    // Non-large object.
    obj = (kForEvac ? evac_region_ : current_region_)->Alloc(num_bytes,
                                                             bytes_allocated,
                                                             usable_size,
                                                             bytes_tl_bulk_allocated);
    if (LIKELY(obj != nullptr)) {
      return obj;
    }
    MutexLock mu(Thread::Current(), region_lock_);
    // Retry with current region since another thread may have updated it.
    obj = (kForEvac ? evac_region_ : current_region_)->Alloc(num_bytes,
                                                             bytes_allocated,
                                                             usable_size,
                                                             bytes_tl_bulk_allocated);
    if (LIKELY(obj != nullptr)) {
      return obj;
    }
    Region* r = AllocateRegion(kForEvac);
    if (LIKELY(r != nullptr)) {
      obj = r->Alloc(num_bytes, bytes_allocated, usable_size, bytes_tl_bulk_allocated);
      CHECK(obj != nullptr);
      // Do the allocation before publishing the region so that no other thread
      // can race ahead and fill in the region before we allocate the object. b/63153464
      if (kForEvac) {
        evac_region_ = r;
      } else {
        current_region_ = r;
      }
      return obj;
    }
  } else {
    // Large object.
    obj = AllocLarge<kForEvac>(num_bytes, bytes_allocated, usable_size,
                               bytes_tl_bulk_allocated);
    if (LIKELY(obj != nullptr)) {
      return obj;
    }
  }
  return nullptr;
}

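// Lock-free bump-pointer allocation within a region: atomically advance top_
// by num_bytes with a weak compare-and-swap, retrying on contention. A weak
// CAS may fail spuriously, which is harmless here: the loop reloads top_ and
// tries again, and only gives up when the region cannot fit the request.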
inline mirror::Object* RegionSpace::Region::Alloc(size_t num_bytes, size_t* bytes_allocated,
                                                  size_t* usable_size,
                                                  size_t* bytes_tl_bulk_allocated) {
  DCHECK(IsAllocated() && IsInToSpace());
  DCHECK_ALIGNED(num_bytes, kAlignment);
  uint8_t* old_top;
  uint8_t* new_top;
  do {
    old_top = top_.LoadRelaxed();
    new_top = old_top + num_bytes;
    if (UNLIKELY(new_top > end_)) {
      return nullptr;
    }
  } while (!top_.CompareExchangeWeakRelaxed(old_top, new_top));
  objects_allocated_.FetchAndAddRelaxed(1);
  DCHECK_LE(Top(), end_);
  DCHECK_LT(old_top, end_);
  DCHECK_LE(new_top, end_);
  *bytes_allocated = num_bytes;
  if (usable_size != nullptr) {
    *usable_size = num_bytes;
  }
  *bytes_tl_bulk_allocated = num_bytes;
  return reinterpret_cast<mirror::Object*>(old_top);
}

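// Sums per-region byte counts under region_lock_, filtered by kRegionType.
// Since the region type is a template argument, the switch below is resolved
// at compile time, so each instantiation reduces to a single filtered loop.
// GetObjectsAllocatedInternal follows the same pattern for object counts.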
template<RegionSpace::RegionType kRegionType>
uint64_t RegionSpace::GetBytesAllocatedInternal() {
  uint64_t bytes = 0;
  MutexLock mu(Thread::Current(), region_lock_);
  for (size_t i = 0; i < num_regions_; ++i) {
    Region* r = &regions_[i];
    if (r->IsFree()) {
      continue;
    }
    switch (kRegionType) {
      case RegionType::kRegionTypeAll:
        bytes += r->BytesAllocated();
        break;
      case RegionType::kRegionTypeFromSpace:
        if (r->IsInFromSpace()) {
          bytes += r->BytesAllocated();
        }
        break;
      case RegionType::kRegionTypeUnevacFromSpace:
        if (r->IsInUnevacFromSpace()) {
          bytes += r->BytesAllocated();
        }
        break;
      case RegionType::kRegionTypeToSpace:
        if (r->IsInToSpace()) {
          bytes += r->BytesAllocated();
        }
        break;
      default:
        LOG(FATAL) << "Unexpected space type: " << kRegionType;
    }
  }
  return bytes;
}

template<RegionSpace::RegionType kRegionType>
uint64_t RegionSpace::GetObjectsAllocatedInternal() {
  uint64_t objects = 0;
  MutexLock mu(Thread::Current(), region_lock_);
  for (size_t i = 0; i < num_regions_; ++i) {
    Region* r = &regions_[i];
    if (r->IsFree()) {
      continue;
    }
    switch (kRegionType) {
      case RegionType::kRegionTypeAll:
        objects += r->ObjectsAllocated();
        break;
      case RegionType::kRegionTypeFromSpace:
        if (r->IsInFromSpace()) {
          objects += r->ObjectsAllocated();
        }
        break;
      case RegionType::kRegionTypeUnevacFromSpace:
        if (r->IsInUnevacFromSpace()) {
          objects += r->ObjectsAllocated();
        }
        break;
      case RegionType::kRegionTypeToSpace:
        if (r->IsInToSpace()) {
          objects += r->ObjectsAllocated();
        }
        break;
      default:
        LOG(FATAL) << "Unexpected space type: " << kRegionType;
    }
  }
  return objects;
}

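// Visits every live object in the space (or only to-space objects when
// kToSpaceOnly). Per-region cases:
//  - A large region holds exactly one object, starting at Begin().
//  - A large-tail region is a continuation of a preceding large region and
//    holds no object headers, so it is skipped.
//  - An ordinary region is walked via the live bitmap when only part of it
//    is live, or linearly object-by-object when it is fully live or its
//    live byte count is still unknown (-1, e.g. newly allocated regions).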
template<bool kToSpaceOnly>
void RegionSpace::WalkInternal(ObjectCallback* callback, void* arg) {
  // TODO: MutexLock on region_lock_ won't work due to lock order
  // issues (the classloader classes lock and the monitor lock). We
  // call this with threads suspended.
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  for (size_t i = 0; i < num_regions_; ++i) {
    Region* r = &regions_[i];
    if (r->IsFree() || (kToSpaceOnly && !r->IsInToSpace())) {
      continue;
    }
    if (r->IsLarge()) {
      // Avoid visiting dead large objects since they may contain dangling pointers to the
      // from-space.
      DCHECK_GT(r->LiveBytes(), 0u) << "Visiting dead large object";
      mirror::Object* obj = reinterpret_cast<mirror::Object*>(r->Begin());
      DCHECK(obj->GetClass() != nullptr);
      callback(obj, arg);
    } else if (r->IsLargeTail()) {
      // Do nothing.
    } else {
      // For newly allocated and evacuated regions, live bytes will be -1.
      uint8_t* pos = r->Begin();
      uint8_t* top = r->Top();
      const bool need_bitmap =
          r->LiveBytes() != static_cast<size_t>(-1) &&
          r->LiveBytes() != static_cast<size_t>(top - pos);
      if (need_bitmap) {
        GetLiveBitmap()->VisitMarkedRange(
            reinterpret_cast<uintptr_t>(pos),
            reinterpret_cast<uintptr_t>(top),
            [callback, arg](mirror::Object* obj) {
              callback(obj, arg);
            });
      } else {
        while (pos < top) {
          mirror::Object* obj = reinterpret_cast<mirror::Object*>(pos);
          if (obj->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>() != nullptr) {
            callback(obj, arg);
            pos = reinterpret_cast<uint8_t*>(GetNextObject(obj));
          } else {
            break;
          }
        }
      }
    }
  }
}

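// Returns the address of the object that follows 'obj' in a linearly walked
// region: advance by the object's size, then round up to kAlignment. For
// illustration only: assuming kAlignment == 8, an object with SizeOf() == 13
// at offset 0 would place the next object at offset 16.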
inline mirror::Object* RegionSpace::GetNextObject(mirror::Object* obj) {
  const uintptr_t position = reinterpret_cast<uintptr_t>(obj) + obj->SizeOf();
  return reinterpret_cast<mirror::Object*>(RoundUp(position, kAlignment));
}

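// A large object (num_bytes > kRegionSize) spans multiple physically
// contiguous regions: the first is marked as a large region with its top set
// to cover the whole allocation, and the remaining ones become large-tail
// regions. The search below is a simple first-fit scan of the region table.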
template<bool kForEvac>
mirror::Object* RegionSpace::AllocLarge(size_t num_bytes, size_t* bytes_allocated,
                                        size_t* usable_size,
                                        size_t* bytes_tl_bulk_allocated) {
  DCHECK_ALIGNED(num_bytes, kAlignment);
  DCHECK_GT(num_bytes, kRegionSize);
  size_t num_regs = RoundUp(num_bytes, kRegionSize) / kRegionSize;
  DCHECK_GT(num_regs, 0U);
  DCHECK_LT((num_regs - 1) * kRegionSize, num_bytes);
  DCHECK_LE(num_bytes, num_regs * kRegionSize);
  MutexLock mu(Thread::Current(), region_lock_);
  if (!kForEvac) {
    // Retain sufficient free regions for full evacuation.
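    // For example (hypothetical numbers): with num_regions_ == 256, 100
    // regions already non-free, and a request needing 30 regions,
    // (100 + 30) * 2 == 260 > 256, so the allocation is refused in order to
    // keep at least half of the regions free as the evacuation copy reserve.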
    if ((num_non_free_regions_ + num_regs) * 2 > num_regions_) {
      return nullptr;
    }
  }
  // Find a large enough set of contiguous free regions.
  size_t left = 0;
  while (left + num_regs - 1 < num_regions_) {
    bool found = true;
    size_t right = left;
    DCHECK_LT(right, left + num_regs)
        << "The inner loop should iterate at least once";
    while (right < left + num_regs) {
      if (regions_[right].IsFree()) {
        ++right;
      } else {
        found = false;
        break;
      }
    }
    if (found) {
      // right points to one region past the last free region.
      DCHECK_EQ(left + num_regs, right);
      Region* first_reg = &regions_[left];
      DCHECK(first_reg->IsFree());
      first_reg->UnfreeLarge(this, time_);
      ++num_non_free_regions_;
      size_t allocated = num_regs * kRegionSize;
      // We make 'top' all usable bytes, as the caller of this
      // allocation may use all of 'usable_size' (see mirror::Array::Alloc).
      first_reg->SetTop(first_reg->Begin() + allocated);
      for (size_t p = left + 1; p < right; ++p) {
        DCHECK_LT(p, num_regions_);
        DCHECK(regions_[p].IsFree());
        regions_[p].UnfreeLargeTail(this, time_);
        ++num_non_free_regions_;
      }
      *bytes_allocated = allocated;
      if (usable_size != nullptr) {
        *usable_size = allocated;
      }
      *bytes_tl_bulk_allocated = allocated;
      return reinterpret_cast<mirror::Object*>(first_reg->Begin());
    } else {
      // right points to the first non-free region. Start the next scan with
      // the one after it.
      left = right + 1;
    }
  }
  return nullptr;
}

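// Bytes allocated in this region, by region kind:
//  - A large region reports the full extent from begin_ to Top(), which
//    covers its tail regions as well.
//  - A large-tail region reports 0; its bytes are attributed to the head
//    region of the large object.
//  - An ordinary allocated region reports its bump-pointer usage, or the
//    owning thread's thread-local bytes when it is serving as a TLAB.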
inline size_t RegionSpace::Region::BytesAllocated() const {
  if (IsLarge()) {
    DCHECK_LT(begin_ + kRegionSize, Top());
    return static_cast<size_t>(Top() - begin_);
  } else if (IsLargeTail()) {
    DCHECK_EQ(begin_, Top());
    return 0;
  } else {
    DCHECK(IsAllocated()) << static_cast<uint>(state_);
    DCHECK_LE(begin_, Top());
    size_t bytes;
    if (is_a_tlab_) {
      bytes = thread_->GetThreadLocalBytesAllocated();
    } else {
      bytes = static_cast<size_t>(Top() - begin_);
    }
    DCHECK_LE(bytes, kRegionSize);
    return bytes;
  }
}

}  // namespace space
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_SPACE_REGION_SPACE_INL_H_