//===-- DWARFDebugAranges.cpp -----------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "DWARFDebugAranges.h"
#include "DWARFCompileUnit.h"
#include "DWARFContext.h"
#include "llvm/Support/Format.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
using namespace llvm;

// Comparison function for sorting DWARFDebugAranges::Range structures by
// their low PC.
static bool RangeLessThan(const DWARFDebugAranges::Range &range1,
                          const DWARFDebugAranges::Range &range2) {
  return range1.LoPC < range2.LoPC;
}

namespace {
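  // Functor that sums the number of descriptors across every parsed
  // DWARFDebugArangeSet so the range collection can be reserved up front.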
  class CountArangeDescriptors {
  public:
    CountArangeDescriptors(uint32_t &count_ref) : Count(count_ref) {}
    void operator()(const DWARFDebugArangeSet &Set) {
      Count += Set.getNumDescriptors();
    }
    uint32_t &Count;
  };

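  // Functor that appends every descriptor of a parsed DWARFDebugArangeSet
  // to the shared range collection, keeping the ranges sorted by LoPC, and
  // records the owning compile unit offset.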
  class AddArangeDescriptors {
  public:
    AddArangeDescriptors(DWARFDebugAranges::RangeColl &Ranges,
                         DWARFDebugAranges::ParsedCUOffsetColl &CUOffsets)
      : RangeCollection(Ranges),
        CUOffsetCollection(CUOffsets) {}
    void operator()(const DWARFDebugArangeSet &Set) {
      DWARFDebugAranges::Range Range;
      Range.Offset = Set.getCompileUnitDIEOffset();
      CUOffsetCollection.insert(Range.Offset);

      for (uint32_t i = 0, n = Set.getNumDescriptors(); i < n; ++i) {
        const DWARFDebugArangeSet::Descriptor *ArangeDescPtr =
            Set.getDescriptor(i);
        Range.LoPC = ArangeDescPtr->Address;
        Range.Length = ArangeDescPtr->Length;

        // Insert each item in increasing address order so binary searching
        // can be done later.
        DWARFDebugAranges::RangeColl::iterator InsertPos =
            std::lower_bound(RangeCollection.begin(), RangeCollection.end(),
                             Range, RangeLessThan);
        RangeCollection.insert(InsertPos, Range);
      }
    }
    DWARFDebugAranges::RangeColl &RangeCollection;
    DWARFDebugAranges::ParsedCUOffsetColl &CUOffsetCollection;
  };
}

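// Parse the raw .debug_aranges section data: read every arange set, then
// copy each descriptor into the sorted Aranges collection and remember the
// compile unit offsets that were covered.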
bool DWARFDebugAranges::extract(DataExtractor debug_aranges_data) {
  if (debug_aranges_data.isValidOffset(0)) {
    uint32_t offset = 0;

    typedef std::vector<DWARFDebugArangeSet> SetCollection;
    SetCollection sets;

    DWARFDebugArangeSet set;
    while (set.extract(debug_aranges_data, &offset))
      sets.push_back(set);

    uint32_t count = 0;

    std::for_each(sets.begin(), sets.end(), CountArangeDescriptors(count));

    if (count > 0) {
      Aranges.reserve(count);
      AddArangeDescriptors range_adder(Aranges, ParsedCUOffsets);
      std::for_each(sets.begin(), sets.end(), range_adder);
    }
  }
  return false;
}

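// For every compile unit whose offset was not already covered by the parsed
// .debug_aranges data, ask the unit to contribute its address ranges, then
// sort and minimize the resulting table.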
bool DWARFDebugAranges::generate(DWARFContext *ctx) {
  if (ctx) {
    const uint32_t num_compile_units = ctx->getNumCompileUnits();
    for (uint32_t cu_idx = 0; cu_idx < num_compile_units; ++cu_idx) {
      if (DWARFCompileUnit *cu = ctx->getCompileUnitAtIndex(cu_idx)) {
        uint32_t CUOffset = cu->getOffset();
        if (ParsedCUOffsets.insert(CUOffset).second)
          cu->buildAddressRangeTable(this, true, CUOffset);
      }
    }
  }
  sort(true, /* overlap size */ 0);
  return !isEmpty();
}

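// Dump each range as its compile unit offset followed by the half-open
// [LoPC, HiPC) address interval.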
void DWARFDebugAranges::dump(raw_ostream &OS) const {
  const uint32_t num_ranges = getNumRanges();
  for (uint32_t i = 0; i < num_ranges; ++i) {
    const Range &range = Aranges[i];
    OS << format("0x%8.8x: [0x%8.8" PRIx64 " - 0x%8.8" PRIx64 ")\n",
                 range.Offset, (uint64_t)range.LoPC, (uint64_t)range.HiPC());
  }
}

void DWARFDebugAranges::Range::dump(raw_ostream &OS) const {
  OS << format("{0x%8.8x}: [0x%8.8" PRIx64 " - 0x%8.8" PRIx64 ")\n",
               Offset, LoPC, HiPC());
}

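// Append a range, coalescing it with the last entry when it belongs to the
// same compile unit offset and starts exactly where that entry ends.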
void DWARFDebugAranges::appendRange(uint32_t offset, uint64_t low_pc,
                                    uint64_t high_pc) {
  if (!Aranges.empty()) {
    if (Aranges.back().Offset == offset && Aranges.back().HiPC() == low_pc) {
      Aranges.back().setHiPC(high_pc);
      return;
    }
  }
  Aranges.push_back(Range(low_pc, high_pc, offset));
}

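// Sort the ranges by LoPC. If minimize is true, also collapse each run of
// consecutive entries that Range::SortedOverlapCheck() reports as mergeable
// into a single entry to keep the table small.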
void DWARFDebugAranges::sort(bool minimize, uint32_t n) {
  const size_t orig_arange_size = Aranges.size();
  // Nothing to do if there is zero or one entry.
  if (orig_arange_size <= 1)
    return;
  // Sort our address range entries.
  std::stable_sort(Aranges.begin(), Aranges.end(), RangeLessThan);

  if (!minimize)
    return;

  // Most address ranges are contiguous from function to function, so the
  // minimized collection will likely be smaller. We calculate the size of
  // the new ranges because, although std::vector objects can be resized,
  // they never reduce their allocated block size or free the excess memory,
  // so we might as well start a brand new collection that is as small as
  // possible.

  // First calculate the size of the new minimal arange vector so we don't
  // have to do a bunch of reallocations as we copy the minimized entries
  // over to the new collection.
  size_t minimal_size = 1;
  for (size_t i = 1; i < orig_arange_size; ++i) {
    if (!Range::SortedOverlapCheck(Aranges[i-1], Aranges[i], n))
      ++minimal_size;
  }

  // If the sizes are the same, then no consecutive aranges can be combined
  // and we are done.
  if (minimal_size == orig_arange_size)
    return;

  // Otherwise, make a new RangeColl that contains only what we need.
  RangeColl minimal_aranges;
  minimal_aranges.resize(minimal_size);
  uint32_t j = 0;
  minimal_aranges[j] = Aranges[0];
  for (size_t i = 1; i < orig_arange_size; ++i) {
    if (Range::SortedOverlapCheck(minimal_aranges[j], Aranges[i], n)) {
      minimal_aranges[j].setHiPC(Aranges[i].HiPC());
    } else {
      // Only increment j if we aren't merging.
      minimal_aranges[++j] = Aranges[i];
    }
  }
  assert(j + 1 == minimal_size);

  // Now swap our new minimal aranges into place. The local minimal_aranges
  // will then contain the old, larger collection, which gets freed when it
  // goes out of scope.
  minimal_aranges.swap(Aranges);
}

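// Binary search the sorted ranges for the one containing the given address
// and return the offset of the compile unit that owns it, or -1U if no
// range matches.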
uint32_t DWARFDebugAranges::findAddress(uint64_t address) const {
  if (!Aranges.empty()) {
    Range range(address);
    RangeCollIterator begin = Aranges.begin();
    RangeCollIterator end = Aranges.end();
    RangeCollIterator pos = std::lower_bound(begin, end, range, RangeLessThan);

    if (pos != end && pos->LoPC <= address && address < pos->HiPC()) {
      return pos->Offset;
    } else if (pos != begin) {
      --pos;
      if (pos->LoPC <= address && address < pos->HiPC())
        return pos->Offset;
    }
  }
  return -1U;
}

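// Return true if every range starts exactly where the previous range ends.
// On success, LoPC and HiPC are set to the bounds of the single contiguous
// region covered by all ranges.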
bool
DWARFDebugAranges::allRangesAreContiguous(uint64_t &LoPC, uint64_t &HiPC) const{
  if (Aranges.empty())
    return false;

  uint64_t next_addr = 0;
  RangeCollIterator begin = Aranges.begin();
  for (RangeCollIterator pos = begin, end = Aranges.end(); pos != end;
       ++pos) {
    if (pos != begin && pos->LoPC != next_addr)
      return false;
    next_addr = pos->HiPC();
  }
  // We checked for empty at the start of the function, so front() and back()
  // are guaranteed to be valid.
  LoPC = Aranges.front().LoPC;
  HiPC = Aranges.back().HiPC();
  return true;
}

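// Report the lowest and highest addresses covered by any range via LoPC and
// HiPC, or return false if the table is empty.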
bool DWARFDebugAranges::getMaxRange(uint64_t &LoPC, uint64_t &HiPC) const {
  if (Aranges.empty())
    return false;
  // We checked for empty at the start of the function, so front() and back()
  // are guaranteed to be valid.
  LoPC = Aranges.front().LoPC;
  HiPC = Aranges.back().HiPC();
  return true;
}