//===-- DWARFDebugAranges.cpp -----------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "DWARFDebugAranges.h"
#include "DWARFCompileUnit.h"
#include "DWARFContext.h"
#include "llvm/Support/Format.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
using namespace llvm;

// Comparison function for DWARFDebugAranges::Range structures: orders the
// ranges by their low PC so the collection can be binary searched.
static bool RangeLessThan(const DWARFDebugAranges::Range &range1,
                          const DWARFDebugAranges::Range &range2) {
  return range1.LoPC < range2.LoPC;
}

namespace {
  class CountArangeDescriptors {
  public:
    CountArangeDescriptors(uint32_t &count_ref) : Count(count_ref) {}
    void operator()(const DWARFDebugArangeSet &set) {
      Count += set.getNumDescriptors();
    }
    uint32_t &Count;
  };

  class AddArangeDescriptors {
  public:
    AddArangeDescriptors(DWARFDebugAranges::RangeColl &ranges)
      : RangeCollection(ranges) {}
    void operator()(const DWARFDebugArangeSet &set) {
      const DWARFDebugArangeSet::Descriptor *arange_desc_ptr;
      DWARFDebugAranges::Range range;
      range.Offset = set.getCompileUnitDIEOffset();

      for (uint32_t i = 0;
           (arange_desc_ptr = set.getDescriptor(i)) != NULL; ++i) {
        range.LoPC = arange_desc_ptr->Address;
        range.Length = arange_desc_ptr->Length;

        // Insert each item in increasing address order so binary searching
        // can later be done!
        DWARFDebugAranges::RangeColl::iterator insert_pos =
          std::lower_bound(RangeCollection.begin(), RangeCollection.end(),
                           range, RangeLessThan);
        RangeCollection.insert(insert_pos, range);
      }
    }
    DWARFDebugAranges::RangeColl &RangeCollection;
  };
}

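// Worked example (hypothetical values): an arange set whose compile unit DIE
// offset is 0x1c and whose descriptors are {Address=0x1000, Length=0x100} and
// {Address=0x2000, Length=0x80} is flattened by extract() below into the
// entries {LoPC=0x1000, Length=0x100, Offset=0x1c} and
// {LoPC=0x2000, Length=0x80, Offset=0x1c}; the std::lower_bound insertion in
// AddArangeDescriptors keeps the collection sorted by LoPC so findAddress()
// can binary search it.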
bool DWARFDebugAranges::extract(DataExtractor debug_aranges_data) {
  if (debug_aranges_data.isValidOffset(0)) {
    uint32_t offset = 0;

    typedef std::vector<DWARFDebugArangeSet> SetCollection;
    SetCollection sets;

    DWARFDebugArangeSet set;
    while (set.extract(debug_aranges_data, &offset))
      sets.push_back(set);

    uint32_t count = 0;

    std::for_each(sets.begin(), sets.end(), CountArangeDescriptors(count));

    if (count > 0) {
      Aranges.reserve(count);
      AddArangeDescriptors range_adder(Aranges);
      std::for_each(sets.begin(), sets.end(), range_adder);
    }
  }
  return !Aranges.empty();
}

bool DWARFDebugAranges::generate(DWARFContext *ctx) {
  clear();
  if (ctx) {
    const uint32_t num_compile_units = ctx->getNumCompileUnits();
    for (uint32_t cu_idx = 0; cu_idx < num_compile_units; ++cu_idx) {
      DWARFCompileUnit *cu = ctx->getCompileUnitAtIndex(cu_idx);
      if (cu)
        cu->buildAddressRangeTable(this, true);
    }
  }
  sort(true, /* overlap size */ 0);
  return !isEmpty();
}

void DWARFDebugAranges::dump(raw_ostream &OS) const {
  const uint32_t num_ranges = getNumRanges();
  for (uint32_t i = 0; i < num_ranges; ++i) {
    const Range &range = Aranges[i];
    OS << format("0x%8.8x: [0x%8.8" PRIx64 " - 0x%8.8" PRIx64 ")\n",
                 range.Offset, (uint64_t)range.LoPC, (uint64_t)range.HiPC());
  }
}

void DWARFDebugAranges::Range::dump(raw_ostream &OS) const {
  OS << format("{0x%8.8x}: [0x%8.8" PRIx64 " - 0x%8.8" PRIx64 ")\n",
               Offset, LoPC, HiPC());
}

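// Worked example (hypothetical values): if the last appended range is
// {Offset=0x1c, [0x1000, 0x1100)} and appendRange(0x1c, 0x1100, 0x1200) is
// called next, the two are coalesced into {Offset=0x1c, [0x1000, 0x1200)}
// instead of appending a second entry; a range with a different offset, or
// one that does not start exactly at the previous HiPC, gets its own entry.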
void DWARFDebugAranges::appendRange(uint32_t offset, uint64_t low_pc,
                                    uint64_t high_pc) {
  if (!Aranges.empty()) {
    if (Aranges.back().Offset == offset && Aranges.back().HiPC() == low_pc) {
      Aranges.back().setHiPC(high_pc);
      return;
    }
  }
  Aranges.push_back(Range(low_pc, high_pc, offset));
}

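// Illustrative example for sort(true, /* n */ 0), assuming
// Range::SortedOverlapCheck (declared in the header) treats two sorted
// same-offset ranges as mergeable when the first ends where the next begins:
// [0x1000, 0x1100) followed by [0x1100, 0x1200) is minimized into a single
// [0x1000, 0x1200) entry, while a later [0x2000, 0x2100) remains separate.
// Addresses are hypothetical, for illustration only.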
void DWARFDebugAranges::sort(bool minimize, uint32_t n) {
  const size_t orig_arange_size = Aranges.size();
  // Size of one? If so, no sorting is needed.
  if (orig_arange_size <= 1)
    return;
  // Sort our address range entries.
  std::stable_sort(Aranges.begin(), Aranges.end(), RangeLessThan);

  if (!minimize)
    return;

  // Most address ranges are contiguous from function to function, so our new
  // ranges will likely be smaller. We calculate the size of the new ranges
  // since, although std::vector objects can be resized, they will never
  // reduce their allocated block size and free any excess memory, so we
  // might as well start a brand new collection that is as small as possible.

  // First calculate the size of the new minimal arange vector
  // so we don't have to do a bunch of re-allocations as we
  // copy the new minimal stuff over to the new collection.
  size_t minimal_size = 1;
  for (size_t i = 1; i < orig_arange_size; ++i) {
    if (!Range::SortedOverlapCheck(Aranges[i-1], Aranges[i], n))
      ++minimal_size;
  }

  // If the sizes are the same, then no consecutive aranges can be combined;
  // we are done.
  if (minimal_size == orig_arange_size)
    return;

  // Else, make a new RangeColl that _only_ contains what we need.
  RangeColl minimal_aranges;
  minimal_aranges.resize(minimal_size);
  uint32_t j = 0;
  minimal_aranges[j] = Aranges[0];
  for (size_t i = 1; i < orig_arange_size; ++i) {
    if (Range::SortedOverlapCheck(minimal_aranges[j], Aranges[i], n)) {
      minimal_aranges[j].setHiPC(Aranges[i].HiPC());
    } else {
      // Only increment j if we aren't merging.
      minimal_aranges[++j] = Aranges[i];
    }
  }
  assert(j + 1 == minimal_size);

  // Now swap our new minimal aranges into place. The local minimal_aranges
  // will then contain the old, larger collection, which will be freed.
  minimal_aranges.swap(Aranges);
}

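// Worked example (hypothetical table): with Aranges holding
// {[0x1000, 0x1100) -> Offset 0x1c} and {[0x2000, 0x2100) -> Offset 0x48},
// findAddress(0x1050) returns 0x1c, findAddress(0x2100) returns -1U because
// the interval is half-open, and findAddress(0x1500) returns -1U because the
// address falls in the gap between the two ranges.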
uint32_t DWARFDebugAranges::findAddress(uint64_t address) const {
  if (!Aranges.empty()) {
    Range range(address);
    RangeCollIterator begin = Aranges.begin();
    RangeCollIterator end = Aranges.end();
    RangeCollIterator pos = std::lower_bound(begin, end, range, RangeLessThan);

    if (pos != end && pos->LoPC <= address && address < pos->HiPC()) {
      return pos->Offset;
    } else if (pos != begin) {
      --pos;
      if (pos->LoPC <= address && address < pos->HiPC())
        return pos->Offset;
    }
  }
  return -1U;
}

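// Worked example (hypothetical values): for the sorted ranges [0x1000, 0x1100)
// and [0x1100, 0x1300), allRangesAreContiguous() returns true and reports
// LoPC = 0x1000, HiPC = 0x1300; if the second range instead started at 0x1200,
// the gap would make it return false without touching LoPC or HiPC.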
bool DWARFDebugAranges::allRangesAreContiguous(uint64_t &LoPC,
                                               uint64_t &HiPC) const {
  if (Aranges.empty())
    return false;

  uint64_t next_addr = 0;
  RangeCollIterator begin = Aranges.begin();
  for (RangeCollIterator pos = begin, end = Aranges.end(); pos != end;
       ++pos) {
    if (pos != begin && pos->LoPC != next_addr)
      return false;
    next_addr = pos->HiPC();
  }
  // We checked for empty at the start of the function, so front() is valid.
  LoPC = Aranges.front().LoPC;
  // We checked for empty at the start of the function, so back() is valid.
  HiPC = Aranges.back().HiPC();
  return true;
}

bool DWARFDebugAranges::getMaxRange(uint64_t &LoPC, uint64_t &HiPC) const {
  if (Aranges.empty())
    return false;
  // We checked for empty at the start of the function, so front() is valid.
  LoPC = Aranges.front().LoPC;
  // We checked for empty at the start of the function, so back() is valid.
  HiPC = Aranges.back().HiPC();
  return true;
}