//===-- DWARFDebugAranges.cpp -----------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "DWARFDebugAranges.h"
#include "DWARFCompileUnit.h"
#include "DWARFContext.h"
#include "llvm/Support/Format.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
using namespace llvm;

// Comparison function for DWARFDebugAranges::Range structures; orders ranges
// by their start address.
static bool RangeLessThan(const DWARFDebugAranges::Range &range1,
                          const DWARFDebugAranges::Range &range2) {
  return range1.LoPC < range2.LoPC;
}

namespace {
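// Functor that accumulates the total number of descriptors contained in a
// collection of DWARFDebugArangeSet objects.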
class CountArangeDescriptors {
public:
  CountArangeDescriptors(uint32_t &count_ref) : Count(count_ref) {}
  void operator()(const DWARFDebugArangeSet &Set) {
    Count += Set.getNumDescriptors();
  }
  uint32_t &Count;
};

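// Functor that converts every descriptor of an arange set into a
// DWARFDebugAranges::Range, inserting it in address order and recording the
// set's compile unit DIE offset.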
class AddArangeDescriptors {
public:
  AddArangeDescriptors(DWARFDebugAranges::RangeColl &Ranges,
                       DWARFDebugAranges::ParsedCUOffsetColl &CUOffsets)
    : RangeCollection(Ranges),
      CUOffsetCollection(CUOffsets) {}
  void operator()(const DWARFDebugArangeSet &Set) {
    DWARFDebugAranges::Range Range;
    Range.Offset = Set.getCompileUnitDIEOffset();
    CUOffsetCollection.insert(Range.Offset);

    for (uint32_t i = 0, n = Set.getNumDescriptors(); i < n; ++i) {
      const DWARFDebugArangeSet::Descriptor *ArangeDescPtr =
          Set.getDescriptor(i);
      Range.LoPC = ArangeDescPtr->Address;
      Range.Length = ArangeDescPtr->Length;

      // Insert each item in increasing address order so that binary searches
      // can be performed later.
      DWARFDebugAranges::RangeColl::iterator InsertPos =
          std::lower_bound(RangeCollection.begin(), RangeCollection.end(),
                           Range, RangeLessThan);
      RangeCollection.insert(InsertPos, Range);
    }
  }
  DWARFDebugAranges::RangeColl &RangeCollection;
  DWARFDebugAranges::ParsedCUOffsetColl &CUOffsetCollection;
};
}

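// Parse every DWARFDebugArangeSet from the .debug_aranges section data,
// recording each set's compile unit offset and appending each descriptor to
// the Aranges collection in address order.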
bool DWARFDebugAranges::extract(DataExtractor debug_aranges_data) {
  if (debug_aranges_data.isValidOffset(0)) {
    uint32_t offset = 0;

    typedef std::vector<DWARFDebugArangeSet> SetCollection;
    SetCollection sets;

    DWARFDebugArangeSet set;
    while (set.extract(debug_aranges_data, &offset))
      sets.push_back(set);

    uint32_t count = 0;

    std::for_each(sets.begin(), sets.end(), CountArangeDescriptors(count));

    if (count > 0) {
      Aranges.reserve(count);
      AddArangeDescriptors range_adder(Aranges, ParsedCUOffsets);
      std::for_each(sets.begin(), sets.end(), range_adder);
    }
  }
  return false;
}

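// Build the address range table from the compile units themselves: every
// compile unit whose offset was not already seen while parsing .debug_aranges
// contributes its own ranges, after which the collection is sorted and
// minimized.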
bool DWARFDebugAranges::generate(DWARFContext *ctx) {
  if (ctx) {
    const uint32_t num_compile_units = ctx->getNumCompileUnits();
    for (uint32_t cu_idx = 0; cu_idx < num_compile_units; ++cu_idx) {
      if (DWARFCompileUnit *cu = ctx->getCompileUnitAtIndex(cu_idx)) {
        uint32_t CUOffset = cu->getOffset();
        if (ParsedCUOffsets.insert(CUOffset).second)
          cu->buildAddressRangeTable(this, true);
      }
    }
  }
  sort(true, /* overlap size */ 0);
  return !isEmpty();
}

void DWARFDebugAranges::dump(raw_ostream &OS) const {
  const uint32_t num_ranges = getNumRanges();
  for (uint32_t i = 0; i < num_ranges; ++i) {
    const Range &range = Aranges[i];
    OS << format("0x%8.8x: [0x%8.8" PRIx64 " - 0x%8.8" PRIx64 ")\n",
                 range.Offset, (uint64_t)range.LoPC, (uint64_t)range.HiPC());
  }
}

void DWARFDebugAranges::Range::dump(raw_ostream &OS) const {
  OS << format("{0x%8.8x}: [0x%8.8" PRIx64 " - 0x%8.8" PRIx64 ")\n",
               Offset, LoPC, HiPC());
}

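// Append a range for the compile unit at 'offset'. If the new range starts
// exactly where the previous range with the same offset ends, extend that
// entry instead of adding a new one.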
void DWARFDebugAranges::appendRange(uint32_t offset, uint64_t low_pc,
                                    uint64_t high_pc) {
  if (!Aranges.empty()) {
    if (Aranges.back().Offset == offset && Aranges.back().HiPC() == low_pc) {
      Aranges.back().setHiPC(high_pc);
      return;
    }
  }
  Aranges.push_back(Range(low_pc, high_pc, offset));
}

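// Sort the ranges by start address. When 'minimize' is set, consecutive
// entries that Range::SortedOverlapCheck() considers combinable (given the
// allowed overlap size 'n') are merged into a single entry.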
void DWARFDebugAranges::sort(bool minimize, uint32_t n) {
  const size_t orig_arange_size = Aranges.size();
  // Size of one? If so, no sorting is needed.
  if (orig_arange_size <= 1)
    return;
  // Sort our address range entries.
  std::stable_sort(Aranges.begin(), Aranges.end(), RangeLessThan);

  if (!minimize)
    return;

  // Most address ranges are contiguous from function to function, so our new
  // ranges will likely be smaller. We calculate the size of the new ranges
  // up front because, although std::vector objects can be resized, they will
  // never reduce their allocated block size and free any excess memory, so we
  // might as well start a brand new collection that is as small as possible.

  // First calculate the size of the new minimal arange vector
  // so we don't have to do a bunch of re-allocations as we
  // copy the new minimal stuff over to the new collection.
  size_t minimal_size = 1;
  for (size_t i = 1; i < orig_arange_size; ++i) {
    if (!Range::SortedOverlapCheck(Aranges[i-1], Aranges[i], n))
      ++minimal_size;
  }

  // If the sizes are the same, then no consecutive aranges can be
  // combined and we are done.
  if (minimal_size == orig_arange_size)
    return;

  // Else, make a new RangeColl that _only_ contains what we need.
  RangeColl minimal_aranges;
  minimal_aranges.resize(minimal_size);
  uint32_t j = 0;
  minimal_aranges[j] = Aranges[0];
  for (size_t i = 1; i < orig_arange_size; ++i) {
    if (Range::SortedOverlapCheck(minimal_aranges[j], Aranges[i], n)) {
      minimal_aranges[j].setHiPC(Aranges[i].HiPC());
    } else {
      // Only increment j if we aren't merging.
      minimal_aranges[++j] = Aranges[i];
    }
  }
  assert(j+1 == minimal_size);

  // Now swap our new minimal aranges into place. The local minimal_aranges
  // will then contain the old big collection, which will get freed.
  minimal_aranges.swap(Aranges);
}

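// Binary search the sorted ranges for one containing 'address' and return its
// compile unit offset, or -1U if no range contains the address.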
uint32_t DWARFDebugAranges::findAddress(uint64_t address) const {
  if (!Aranges.empty()) {
    Range range(address);
    RangeCollIterator begin = Aranges.begin();
    RangeCollIterator end = Aranges.end();
    RangeCollIterator pos = std::lower_bound(begin, end, range, RangeLessThan);

    if (pos != end && pos->LoPC <= address && address < pos->HiPC()) {
      return pos->Offset;
    } else if (pos != begin) {
      --pos;
      if (pos->LoPC <= address && address < pos->HiPC())
        return pos->Offset;
    }
  }
  return -1U;
}

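// Return true and report the bounds of the covered region in LoPC/HiPC if
// every range starts exactly where the previous one ends; otherwise return
// false.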
bool
DWARFDebugAranges::allRangesAreContiguous(uint64_t &LoPC, uint64_t &HiPC) const{
  if (Aranges.empty())
    return false;

  uint64_t next_addr = 0;
  RangeCollIterator begin = Aranges.begin();
  for (RangeCollIterator pos = begin, end = Aranges.end(); pos != end;
       ++pos) {
    if (pos != begin && pos->LoPC != next_addr)
      return false;
    next_addr = pos->HiPC();
  }
  // We checked for empty at the start of the function, so front() is valid.
  LoPC = Aranges.front().LoPC;
  // We checked for empty at the start of the function, so back() is valid.
  HiPC = Aranges.back().HiPC();
  return true;
}

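// Set LoPC/HiPC to the start of the first range and the end of the last
// range; return false if there are no ranges.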
bool DWARFDebugAranges::getMaxRange(uint64_t &LoPC, uint64_t &HiPC) const {
  if (Aranges.empty())
    return false;
  // We checked for empty at the start of the function, so front() is valid.
  LoPC = Aranges.front().LoPC;
  // We checked for empty at the start of the function, so back() is valid.
  HiPC = Aranges.back().HiPC();
  return true;
}