Jamie Garside | 558e144 | 2020-03-27 17:05:55 +0000 | [diff] [blame] | 1 | // Copyright 2020 The Pigweed Authors |
| 2 | // |
| 3 | // Licensed under the Apache License, Version 2.0 (the "License"); you may not |
| 4 | // use this file except in compliance with the License. You may obtain a copy of |
| 5 | // the License at |
| 6 | // |
| 7 | // https://www.apache.org/licenses/LICENSE-2.0 |
| 8 | // |
| 9 | // Unless required by applicable law or agreed to in writing, software |
| 10 | // distributed under the License is distributed on an "AS IS" BASIS, WITHOUT |
| 11 | // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the |
| 12 | // License for the specific language governing permissions and limitations under |
| 13 | // the License. |
| 14 | |
| 15 | #include "pw_allocator/block.h" |
| 16 | |
Chenghan Zhou | ea0f7ad | 2020-07-29 18:20:37 -0400 | [diff] [blame] | 17 | #include <cstring> |
| 18 | |
Jamie Garside | 558e144 | 2020-03-27 17:05:55 +0000 | [diff] [blame] | 19 | namespace pw::allocator { |
| 20 | |
Wyatt Hepler | e2cbadf | 2020-06-22 11:21:45 -0700 | [diff] [blame] | 21 | Status Block::Init(const std::span<std::byte> region, Block** block) { |
Jamie Garside | 558e144 | 2020-03-27 17:05:55 +0000 | [diff] [blame] | 22 | // Ensure the region we're given is aligned and sized accordingly |
| 23 | if (reinterpret_cast<uintptr_t>(region.data()) % alignof(Block) != 0) { |
Wyatt Hepler | d78f7c6 | 2020-09-28 14:27:32 -0700 | [diff] [blame] | 24 | return Status::InvalidArgument(); |
Jamie Garside | 558e144 | 2020-03-27 17:05:55 +0000 | [diff] [blame] | 25 | } |
| 26 | |
| 27 | if (region.size() < sizeof(Block)) { |
Wyatt Hepler | d78f7c6 | 2020-09-28 14:27:32 -0700 | [diff] [blame] | 28 | return Status::InvalidArgument(); |
Jamie Garside | 558e144 | 2020-03-27 17:05:55 +0000 | [diff] [blame] | 29 | } |
| 30 | |
| 31 | union { |
| 32 | Block* block; |
| 33 | std::byte* bytes; |
| 34 | } aliased; |
| 35 | aliased.bytes = region.data(); |
| 36 | |
| 37 | // Make "next" point just past the end of this block; forming a linked list |
| 38 | // with the following storage. Since the space between this block and the |
| 39 | // next are implicitly part of the raw data, size can be computed by |
| 40 | // subtracting the pointers. |
| 41 | aliased.block->next = reinterpret_cast<Block*>(region.end()); |
| 42 | aliased.block->MarkLast(); |
| 43 | |
| 44 | aliased.block->prev = nullptr; |
| 45 | *block = aliased.block; |
Chenghan Zhou | ea0f7ad | 2020-07-29 18:20:37 -0400 | [diff] [blame] | 46 | #if PW_ALLOCATOR_POISON_ENABLE |
| 47 | (*block)->PoisonBlock(); |
| 48 | #endif // PW_ALLOCATOR_POISON_ENABLE |
Wyatt Hepler | d78f7c6 | 2020-09-28 14:27:32 -0700 | [diff] [blame] | 49 | return Status::Ok(); |
Jamie Garside | 558e144 | 2020-03-27 17:05:55 +0000 | [diff] [blame] | 50 | } |
| 51 | |
// Splits this free block into a head block with `head_block_inner_size`
// usable bytes (rounded up to alignof(Block)) and a new trailing free block
// occupying the remainder. On success, *new_block points at the trailing
// block, which is linked in after this one.
//
// Returns:
//   InvalidArgument    - new_block is null.
//   FailedPrecondition - this block is in use.
//   OutOfRange         - the (aligned) requested size exceeds InnerSize().
//   ResourceExhausted  - the remainder is too small to hold a block header
//                        plus its poison guard bytes.
Status Block::Split(size_t head_block_inner_size, Block** new_block) {
  if (new_block == nullptr) {
    return Status::InvalidArgument();
  }

  // Don't split used blocks.
  // TODO: Relax this restriction? Flag to enable/disable this check?
  if (Used()) {
    return Status::FailedPrecondition();
  }

  // First round the head_block_inner_size up to an alignof(Block) boundary.
  // This ensures that the next block header is aligned accordingly.
  // Alignment must be a power of two, hence align()-1 will return the
  // remainder.
  auto align_bit_mask = alignof(Block) - 1;
  size_t aligned_head_block_inner_size = head_block_inner_size;
  if ((head_block_inner_size & align_bit_mask) != 0) {
    // Clear the low bits, then bump up by one full alignment unit.
    aligned_head_block_inner_size =
        (head_block_inner_size & ~align_bit_mask) + alignof(Block);
  }

  // (1) Are we trying to allocate a head block larger than the current head
  // block? This may happen because of the alignment above.
  if (aligned_head_block_inner_size > InnerSize()) {
    return Status::OutOfRange();
  }

  // (2) Does the resulting block have enough space to store the header?
  // The remainder must also fit the poison guards on both sides of the new
  // block's usable space (the offset is zero when poisoning is disabled).
  // TODO: What to do if the returned section is empty (i.e. remaining
  // size == sizeof(Block))?
  if (InnerSize() - aligned_head_block_inner_size <
      sizeof(Block) + 2 * PW_ALLOCATOR_POISON_OFFSET) {
    return Status::ResourceExhausted();
  }

  // Create the new block inside the current one.
  Block* new_next = reinterpret_cast<Block*>(
      // From the current position...
      reinterpret_cast<intptr_t>(this) +
      // skip past the current header...
      sizeof(*this) +
      // add the poison bytes before usable space ...
      PW_ALLOCATOR_POISON_OFFSET +
      // into the usable bytes by the new inner size...
      aligned_head_block_inner_size +
      // add the poison bytes after the usable space ...
      PW_ALLOCATOR_POISON_OFFSET);

  // If we're inserting in the middle, we need to update the current next
  // block to point to what we're inserting
  if (!Last()) {
    Next()->prev = new_next;
  }

  // Copy next verbatim so the next block also gets the "last"-ness
  new_next->next = next;
  new_next->prev = this;

  // Update the current block to point to the new head.
  next = new_next;

  *new_block = next;

#if PW_ALLOCATOR_POISON_ENABLE
  // Re-stamp guards on both halves: the head's trailing guard moved, and the
  // new block needs guards on both sides.
  PoisonBlock();
  (*new_block)->PoisonBlock();
#endif  // PW_ALLOCATOR_POISON_ENABLE

  return Status::Ok();
}
| 123 | |
| 124 | Status Block::MergeNext() { |
| 125 | // Anything to merge with? |
| 126 | if (Last()) { |
Wyatt Hepler | d78f7c6 | 2020-09-28 14:27:32 -0700 | [diff] [blame] | 127 | return Status::OutOfRange(); |
Jamie Garside | 558e144 | 2020-03-27 17:05:55 +0000 | [diff] [blame] | 128 | } |
| 129 | |
| 130 | // Is this or the next block in use? |
Chenghan Zhou | ea0f7ad | 2020-07-29 18:20:37 -0400 | [diff] [blame] | 131 | if (Used() || Next()->Used()) { |
Wyatt Hepler | d78f7c6 | 2020-09-28 14:27:32 -0700 | [diff] [blame] | 132 | return Status::FailedPrecondition(); |
Jamie Garside | 558e144 | 2020-03-27 17:05:55 +0000 | [diff] [blame] | 133 | } |
| 134 | |
| 135 | // Simply enough, this block's next pointer becomes the next block's |
| 136 | // next pointer. We then need to re-wire the "next next" block's prev |
| 137 | // pointer to point back to us though. |
Chenghan Zhou | ea0f7ad | 2020-07-29 18:20:37 -0400 | [diff] [blame] | 138 | next = Next()->next; |
Jamie Garside | 558e144 | 2020-03-27 17:05:55 +0000 | [diff] [blame] | 139 | |
| 140 | // Copying the pointer also copies the "last" status, so this is safe. |
| 141 | if (!Last()) { |
Chenghan Zhou | ea0f7ad | 2020-07-29 18:20:37 -0400 | [diff] [blame] | 142 | Next()->prev = this; |
Jamie Garside | 558e144 | 2020-03-27 17:05:55 +0000 | [diff] [blame] | 143 | } |
| 144 | |
Wyatt Hepler | d78f7c6 | 2020-09-28 14:27:32 -0700 | [diff] [blame] | 145 | return Status::Ok(); |
Jamie Garside | 558e144 | 2020-03-27 17:05:55 +0000 | [diff] [blame] | 146 | } |
| 147 | |
| 148 | Status Block::MergePrev() { |
| 149 | // We can't merge if we have no previous. After that though, merging with |
| 150 | // the previous block is just MergeNext from the previous block. |
| 151 | if (prev == nullptr) { |
Wyatt Hepler | d78f7c6 | 2020-09-28 14:27:32 -0700 | [diff] [blame] | 152 | return Status::OutOfRange(); |
Jamie Garside | 558e144 | 2020-03-27 17:05:55 +0000 | [diff] [blame] | 153 | } |
| 154 | |
| 155 | // WARNING: This class instance will still exist, but technically be invalid |
| 156 | // after this has been invoked. Be careful when doing anything with `this` |
| 157 | // After doing the below. |
| 158 | return prev->MergeNext(); |
| 159 | } |
| 160 | |
Chenghan Zhou | ea0f7ad | 2020-07-29 18:20:37 -0400 | [diff] [blame] | 161 | // TODO(pwbug/234): Add stack tracing to locate which call to the heap operation |
| 162 | // caused the corruption. |
| 163 | // TODO: Add detailed information to log report and leave succinct messages |
| 164 | // in the crash message. |
| 165 | void Block::CrashIfInvalid() { |
| 166 | switch (CheckStatus()) { |
| 167 | case VALID: |
| 168 | break; |
| 169 | case MISALIGNED: |
| 170 | PW_DCHECK(false, "The block at address %p is not aligned.", this); |
| 171 | break; |
| 172 | case NEXT_MISMATCHED: |
| 173 | PW_DCHECK(false, |
Chenghan Zhou | 091e312 | 2020-08-03 17:52:34 -0400 | [diff] [blame] | 174 | "The 'prev' field in the next block (%p) does not match the " |
Chenghan Zhou | ea0f7ad | 2020-07-29 18:20:37 -0400 | [diff] [blame] | 175 | "address of the current block (%p).", |
| 176 | Next()->Prev(), |
| 177 | this); |
| 178 | break; |
| 179 | case PREV_MISMATCHED: |
| 180 | PW_DCHECK(false, |
Chenghan Zhou | 091e312 | 2020-08-03 17:52:34 -0400 | [diff] [blame] | 181 | "The 'next' field in the previous block (%p) does not match " |
Chenghan Zhou | ea0f7ad | 2020-07-29 18:20:37 -0400 | [diff] [blame] | 182 | "the address of the current block (%p).", |
| 183 | Prev()->Next(), |
| 184 | this); |
| 185 | break; |
| 186 | case POISON_CORRUPTED: |
| 187 | PW_DCHECK( |
| 188 | false, "The poisoned pattern in the block at %p is corrupted.", this); |
| 189 | break; |
| 190 | } |
| 191 | } |
| 192 | |
| 193 | // This function will return a Block::BlockStatus that is either VALID or |
| 194 | // indicates the reason why the Block is invalid. If the Block is invalid at |
| 195 | // multiple points, this function will only return one of the reasons. |
| 196 | Block::BlockStatus Block::CheckStatus() const { |
| 197 | // Make sure the Block is aligned. |
| 198 | if (reinterpret_cast<uintptr_t>(this) % alignof(Block) != 0) { |
| 199 | return BlockStatus::MISALIGNED; |
| 200 | } |
| 201 | |
| 202 | // Test if the prev/next pointer for this Block matches. |
| 203 | if (!Last() && (this >= Next() || this != Next()->Prev())) { |
| 204 | return BlockStatus::NEXT_MISMATCHED; |
| 205 | } |
| 206 | |
| 207 | if (Prev() && (this <= Prev() || this != Prev()->Next())) { |
| 208 | return BlockStatus::PREV_MISMATCHED; |
| 209 | } |
| 210 | |
| 211 | #if PW_ALLOCATOR_POISON_ENABLE |
| 212 | if (!this->CheckPoisonBytes()) { |
| 213 | return BlockStatus::POISON_CORRUPTED; |
| 214 | } |
| 215 | #endif // PW_ALLOCATOR_POISON_ENABLE |
| 216 | return BlockStatus::VALID; |
| 217 | } |
| 218 | |
| 219 | // Paint sizeof(void*) bytes before and after the usable space in Block as the |
| 220 | // randomized function pattern. |
| 221 | void Block::PoisonBlock() { |
| 222 | #if PW_ALLOCATOR_POISON_ENABLE |
| 223 | std::byte* front_region = reinterpret_cast<std::byte*>(this) + sizeof(*this); |
| 224 | memcpy(front_region, POISON_PATTERN, PW_ALLOCATOR_POISON_OFFSET); |
| 225 | |
| 226 | std::byte* end_region = |
| 227 | reinterpret_cast<std::byte*>(Next()) - PW_ALLOCATOR_POISON_OFFSET; |
| 228 | memcpy(end_region, POISON_PATTERN, PW_ALLOCATOR_POISON_OFFSET); |
| 229 | #endif // PW_ALLOCATOR_POISON_ENABLE |
| 230 | } |
| 231 | |
| 232 | bool Block::CheckPoisonBytes() const { |
| 233 | #if PW_ALLOCATOR_POISON_ENABLE |
| 234 | std::byte* front_region = reinterpret_cast<std::byte*>( |
| 235 | reinterpret_cast<intptr_t>(this) + sizeof(*this)); |
| 236 | if (std::memcmp(front_region, POISON_PATTERN, PW_ALLOCATOR_POISON_OFFSET)) { |
| 237 | return false; |
| 238 | } |
| 239 | std::byte* end_region = reinterpret_cast<std::byte*>( |
| 240 | reinterpret_cast<intptr_t>(this->Next()) - PW_ALLOCATOR_POISON_OFFSET); |
| 241 | if (std::memcmp(end_region, POISON_PATTERN, PW_ALLOCATOR_POISON_OFFSET)) { |
| 242 | return false; |
| 243 | } |
| 244 | #endif // PW_ALLOCATOR_POISON_ENABLE |
| 245 | return true; |
| 246 | } |
| 247 | |
Jamie Garside | 558e144 | 2020-03-27 17:05:55 +0000 | [diff] [blame] | 248 | } // namespace pw::allocator |