blob: 234092c12c34d3e3c8d2e7ff15f98de9e8e852a4 [file] [log] [blame]
//===--- RewriteRope.cpp - Rope specialized for rewriter --------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the RewriteRope class, which is a powerful string.
//
//===----------------------------------------------------------------------===//
13
#include "clang/Rewrite/RewriteRope.h"
#include "llvm/Support/Casting.h"
#include <algorithm>
#include <cstring>
using namespace clang;
using llvm::dyn_cast;
using llvm::cast;
20
/// The rope is stored as an adapted B+ tree.  Note that erases do not keep
/// the tree balanced; only inserts maintain the B+ tree invariants.
Chris Lattner5fd3e262008-04-14 17:54:23 +000024//===----------------------------------------------------------------------===//
25// RopePieceBTreeNode Class
26//===----------------------------------------------------------------------===//
27
28namespace {
29 class RopePieceBTreeNode {
30 protected:
31 /// WidthFactor - This controls the number of K/V slots held in the BTree:
32 /// how wide it is. Each level of the BTree is guaranteed to have at least
33 /// 'WidthFactor' elements in it (either ropepieces or children), (except
34 /// the root, which may have less) and may have at most 2*WidthFactor
35 /// elements.
36 enum { WidthFactor = 8 };
37
38 /// Size - This is the number of bytes of file this node (including any
39 /// potential children) covers.
40 unsigned Size;
41
42 /// IsLeaf - True if this is an instance of RopePieceBTreeLeaf, false if it
43 /// is an instance of RopePieceBTreeInterior.
44 bool IsLeaf;
45
Chris Lattnerb442e212008-04-14 20:05:32 +000046 RopePieceBTreeNode(bool isLeaf) : Size(0), IsLeaf(isLeaf) {}
Chris Lattner5fd3e262008-04-14 17:54:23 +000047 ~RopePieceBTreeNode() {}
48 public:
49
50 bool isLeaf() const { return IsLeaf; }
51 unsigned size() const { return Size; }
52
53 void Destroy();
54
55 /// split - Split the range containing the specified offset so that we are
56 /// guaranteed that there is a place to do an insertion at the specified
Chris Lattnerbf268562008-04-14 22:10:58 +000057 /// offset. The offset is relative, so "0" is the start of the node.
58 ///
59 /// If there is no space in this subtree for the extra piece, the extra tree
60 /// node is returned and must be inserted into a parent.
61 RopePieceBTreeNode *split(unsigned Offset);
Chris Lattner5fd3e262008-04-14 17:54:23 +000062
63 /// insert - Insert the specified ropepiece into this tree node at the
64 /// specified offset. The offset is relative, so "0" is the start of the
Chris Lattnerbf268562008-04-14 22:10:58 +000065 /// node.
66 ///
67 /// If there is no space in this subtree for the extra piece, the extra tree
68 /// node is returned and must be inserted into a parent.
69 RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);
Chris Lattner5fd3e262008-04-14 17:54:23 +000070
71 /// erase - Remove NumBytes from this node at the specified offset. We are
72 /// guaranteed that there is a split at Offset.
73 void erase(unsigned Offset, unsigned NumBytes);
74
75 static inline bool classof(const RopePieceBTreeNode *) { return true; }
76
77 };
78} // end anonymous namespace
79
80//===----------------------------------------------------------------------===//
81// RopePieceBTreeLeaf Class
82//===----------------------------------------------------------------------===//
83
84namespace {
85 class RopePieceBTreeLeaf : public RopePieceBTreeNode {
86 /// NumPieces - This holds the number of rope pieces currently active in the
87 /// Pieces array.
88 unsigned char NumPieces;
89
90 /// Pieces - This tracks the file chunks currently in this leaf.
91 ///
92 RopePiece Pieces[2*WidthFactor];
93
94 /// NextLeaf - This is a pointer to the next leaf in the tree, allowing
95 /// efficient in-order forward iteration of the tree without traversal.
96 const RopePieceBTreeLeaf *NextLeaf;
97 public:
Chris Lattner70778c82008-04-14 20:07:03 +000098 RopePieceBTreeLeaf() : RopePieceBTreeNode(true), NumPieces(0), NextLeaf(0){}
Chris Lattner5fd3e262008-04-14 17:54:23 +000099
100 bool isFull() const { return NumPieces == 2*WidthFactor; }
101
102 /// clear - Remove all rope pieces from this leaf.
103 void clear() {
104 while (NumPieces)
105 Pieces[--NumPieces] = RopePiece();
106 Size = 0;
107 }
108
109 unsigned getNumPieces() const { return NumPieces; }
110
111 const RopePiece &getPiece(unsigned i) const {
112 assert(i < getNumPieces() && "Invalid piece ID");
113 return Pieces[i];
114 }
115
116 const RopePieceBTreeLeaf *getNextLeafInOrder() const { return NextLeaf; }
117 void setNextLeafInOrder(const RopePieceBTreeLeaf *NL) { NextLeaf = NL; }
118
119 void FullRecomputeSizeLocally() {
120 Size = 0;
121 for (unsigned i = 0, e = getNumPieces(); i != e; ++i)
122 Size += getPiece(i).size();
123 }
124
125 /// split - Split the range containing the specified offset so that we are
126 /// guaranteed that there is a place to do an insertion at the specified
Chris Lattnerbf268562008-04-14 22:10:58 +0000127 /// offset. The offset is relative, so "0" is the start of the node.
128 ///
129 /// If there is no space in this subtree for the extra piece, the extra tree
130 /// node is returned and must be inserted into a parent.
131 RopePieceBTreeNode *split(unsigned Offset);
Chris Lattner5fd3e262008-04-14 17:54:23 +0000132
133 /// insert - Insert the specified ropepiece into this tree node at the
134 /// specified offset. The offset is relative, so "0" is the start of the
Chris Lattnerbf268562008-04-14 22:10:58 +0000135 /// node.
136 ///
137 /// If there is no space in this subtree for the extra piece, the extra tree
138 /// node is returned and must be inserted into a parent.
139 RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);
Chris Lattner5fd3e262008-04-14 17:54:23 +0000140
141
142 /// erase - Remove NumBytes from this node at the specified offset. We are
143 /// guaranteed that there is a split at Offset.
144 void erase(unsigned Offset, unsigned NumBytes);
145
146 static inline bool classof(const RopePieceBTreeLeaf *) { return true; }
147 static inline bool classof(const RopePieceBTreeNode *N) {
148 return N->isLeaf();
149 }
150 };
151} // end anonymous namespace
152
153/// split - Split the range containing the specified offset so that we are
154/// guaranteed that there is a place to do an insertion at the specified
Chris Lattnerbf268562008-04-14 22:10:58 +0000155/// offset. The offset is relative, so "0" is the start of the node.
156///
157/// If there is no space in this subtree for the extra piece, the extra tree
158/// node is returned and must be inserted into a parent.
159RopePieceBTreeNode *RopePieceBTreeLeaf::split(unsigned Offset) {
Chris Lattner5fd3e262008-04-14 17:54:23 +0000160 // Find the insertion point. We are guaranteed that there is a split at the
161 // specified offset so find it.
162 if (Offset == 0 || Offset == size()) {
163 // Fastpath for a common case. There is already a splitpoint at the end.
Chris Lattnerbf268562008-04-14 22:10:58 +0000164 return 0;
Chris Lattner5fd3e262008-04-14 17:54:23 +0000165 }
166
167 // Find the piece that this offset lands in.
168 unsigned PieceOffs = 0;
169 unsigned i = 0;
170 while (Offset >= PieceOffs+Pieces[i].size()) {
171 PieceOffs += Pieces[i].size();
172 ++i;
173 }
174
175 // If there is already a split point at the specified offset, just return
176 // success.
177 if (PieceOffs == Offset)
Chris Lattnerbf268562008-04-14 22:10:58 +0000178 return 0;
Chris Lattner5fd3e262008-04-14 17:54:23 +0000179
180 // Otherwise, we need to split piece 'i' at Offset-PieceOffs. Convert Offset
181 // to being Piece relative.
182 unsigned IntraPieceOffset = Offset-PieceOffs;
183
184 // We do this by shrinking the RopePiece and then doing an insert of the tail.
185 RopePiece Tail(Pieces[i].StrData, Pieces[i].StartOffs+IntraPieceOffset,
186 Pieces[i].EndOffs);
187 Size -= Pieces[i].size();
188 Pieces[i].EndOffs = Pieces[i].StartOffs+IntraPieceOffset;
189 Size += Pieces[i].size();
190
Chris Lattnerbf268562008-04-14 22:10:58 +0000191 return insert(Offset, Tail);
Chris Lattner5fd3e262008-04-14 17:54:23 +0000192}
193
194
195/// insert - Insert the specified RopePiece into this tree node at the
Chris Lattnerbf268562008-04-14 22:10:58 +0000196/// specified offset. The offset is relative, so "0" is the start of the node.
197///
198/// If there is no space in this subtree for the extra piece, the extra tree
199/// node is returned and must be inserted into a parent.
200RopePieceBTreeNode *RopePieceBTreeLeaf::insert(unsigned Offset,
201 const RopePiece &R) {
Chris Lattner5fd3e262008-04-14 17:54:23 +0000202 // If this node is not full, insert the piece.
203 if (!isFull()) {
204 // Find the insertion point. We are guaranteed that there is a split at the
205 // specified offset so find it.
206 unsigned i = 0, e = getNumPieces();
207 if (Offset == size()) {
208 // Fastpath for a common case.
209 i = e;
210 } else {
211 unsigned SlotOffs = 0;
212 for (; Offset > SlotOffs; ++i)
213 SlotOffs += getPiece(i).size();
214 assert(SlotOffs == Offset && "Split didn't occur before insertion!");
215 }
216
217 // For an insertion into a non-full leaf node, just insert the value in
218 // its sorted position. This requires moving later values over.
219 for (; i != e; --e)
220 Pieces[e] = Pieces[e-1];
221 Pieces[i] = R;
222 ++NumPieces;
223 Size += R.size();
Chris Lattnerbf268562008-04-14 22:10:58 +0000224 return 0;
Chris Lattner5fd3e262008-04-14 17:54:23 +0000225 }
226
227 // Otherwise, if this is leaf is full, split it in two halves. Since this
228 // node is full, it contains 2*WidthFactor values. We move the first
229 // 'WidthFactor' values to the LHS child (which we leave in this node) and
230 // move the last 'WidthFactor' values into the RHS child.
231
232 // Create the new node.
233 RopePieceBTreeLeaf *NewNode = new RopePieceBTreeLeaf();
234
235 // Move over the last 'WidthFactor' values from here to NewNode.
236 std::copy(&Pieces[WidthFactor], &Pieces[2*WidthFactor],
237 &NewNode->Pieces[0]);
238 // Replace old pieces with null RopePieces to drop refcounts.
239 std::fill(&Pieces[WidthFactor], &Pieces[2*WidthFactor], RopePiece());
240
241 // Decrease the number of values in the two nodes.
242 NewNode->NumPieces = NumPieces = WidthFactor;
243
244 // Recompute the two nodes' size.
245 NewNode->FullRecomputeSizeLocally();
246 FullRecomputeSizeLocally();
247
248 // Update the list of leaves.
249 NewNode->setNextLeafInOrder(this->getNextLeafInOrder());
250 this->setNextLeafInOrder(NewNode);
251
Chris Lattnerbf268562008-04-14 22:10:58 +0000252 // These insertions can't fail.
Chris Lattner5fd3e262008-04-14 17:54:23 +0000253 if (this->size() >= Offset)
Chris Lattnerbf268562008-04-14 22:10:58 +0000254 this->insert(Offset, R);
Chris Lattner5fd3e262008-04-14 17:54:23 +0000255 else
Chris Lattnerbf268562008-04-14 22:10:58 +0000256 NewNode->insert(Offset - this->size(), R);
257 return NewNode;
Chris Lattner5fd3e262008-04-14 17:54:23 +0000258}
259
260/// erase - Remove NumBytes from this node at the specified offset. We are
261/// guaranteed that there is a split at Offset.
262void RopePieceBTreeLeaf::erase(unsigned Offset, unsigned NumBytes) {
263 // Since we are guaranteed that there is a split at Offset, we start by
264 // finding the Piece that starts there.
265 unsigned PieceOffs = 0;
266 unsigned i = 0;
267 for (; Offset > PieceOffs; ++i)
268 PieceOffs += getPiece(i).size();
269 assert(PieceOffs == Offset && "Split didn't occur before erase!");
270
271 unsigned StartPiece = i;
272
273 // Figure out how many pieces completely cover 'NumBytes'. We want to remove
274 // all of them.
275 for (; Offset+NumBytes > PieceOffs+getPiece(i).size(); ++i)
276 PieceOffs += getPiece(i).size();
277
278 // If we exactly include the last one, include it in the region to delete.
279 if (Offset+NumBytes == PieceOffs+getPiece(i).size())
280 PieceOffs += getPiece(i).size(), ++i;
281
282 // If we completely cover some RopePieces, erase them now.
283 if (i != StartPiece) {
284 unsigned NumDeleted = i-StartPiece;
285 for (; i != getNumPieces(); ++i)
286 Pieces[i-NumDeleted] = Pieces[i];
287
288 // Drop references to dead rope pieces.
289 std::fill(&Pieces[getNumPieces()-NumDeleted], &Pieces[getNumPieces()],
290 RopePiece());
291 NumPieces -= NumDeleted;
292
293 unsigned CoverBytes = PieceOffs-Offset;
294 NumBytes -= CoverBytes;
295 Size -= CoverBytes;
296 }
297
298 // If we completely removed some stuff, we could be done.
299 if (NumBytes == 0) return;
300
301 // Okay, now might be erasing part of some Piece. If this is the case, then
302 // move the start point of the piece.
303 assert(getPiece(StartPiece).size() > NumBytes);
304 Pieces[StartPiece].StartOffs += NumBytes;
305
306 // The size of this node just shrunk by NumBytes.
307 Size -= NumBytes;
308}
309
310//===----------------------------------------------------------------------===//
311// RopePieceBTreeInterior Class
312//===----------------------------------------------------------------------===//
313
314namespace {
315 // Holds up to 2*WidthFactor children.
316 class RopePieceBTreeInterior : public RopePieceBTreeNode {
317 /// NumChildren - This holds the number of children currently active in the
318 /// Children array.
319 unsigned char NumChildren;
320 RopePieceBTreeNode *Children[2*WidthFactor];
321 public:
Chris Lattner70778c82008-04-14 20:07:03 +0000322 RopePieceBTreeInterior() : RopePieceBTreeNode(false), NumChildren(0) {}
Chris Lattner5fd3e262008-04-14 17:54:23 +0000323
324 RopePieceBTreeInterior(RopePieceBTreeNode *LHS, RopePieceBTreeNode *RHS)
325 : RopePieceBTreeNode(false) {
326 Children[0] = LHS;
327 Children[1] = RHS;
328 NumChildren = 2;
329 Size = LHS->size() + RHS->size();
330 }
331
332 bool isFull() const { return NumChildren == 2*WidthFactor; }
333
334 unsigned getNumChildren() const { return NumChildren; }
335 const RopePieceBTreeNode *getChild(unsigned i) const {
336 assert(i < NumChildren && "invalid child #");
337 return Children[i];
338 }
339 RopePieceBTreeNode *getChild(unsigned i) {
340 assert(i < NumChildren && "invalid child #");
341 return Children[i];
342 }
343
344 void FullRecomputeSizeLocally() {
345 Size = 0;
346 for (unsigned i = 0, e = getNumChildren(); i != e; ++i)
347 Size += getChild(i)->size();
348 }
349
350
351 /// split - Split the range containing the specified offset so that we are
352 /// guaranteed that there is a place to do an insertion at the specified
Chris Lattnerbf268562008-04-14 22:10:58 +0000353 /// offset. The offset is relative, so "0" is the start of the node.
354 ///
355 /// If there is no space in this subtree for the extra piece, the extra tree
356 /// node is returned and must be inserted into a parent.
357 RopePieceBTreeNode *split(unsigned Offset);
Chris Lattner5fd3e262008-04-14 17:54:23 +0000358
359
360 /// insert - Insert the specified ropepiece into this tree node at the
361 /// specified offset. The offset is relative, so "0" is the start of the
Chris Lattnerbf268562008-04-14 22:10:58 +0000362 /// node.
363 ///
364 /// If there is no space in this subtree for the extra piece, the extra tree
365 /// node is returned and must be inserted into a parent.
366 RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);
Chris Lattner5fd3e262008-04-14 17:54:23 +0000367
368 /// HandleChildPiece - A child propagated an insertion result up to us.
369 /// Insert the new child, and/or propagate the result further up the tree.
Chris Lattnerbf268562008-04-14 22:10:58 +0000370 RopePieceBTreeNode *HandleChildPiece(unsigned i, RopePieceBTreeNode *RHS);
Chris Lattner5fd3e262008-04-14 17:54:23 +0000371
372 /// erase - Remove NumBytes from this node at the specified offset. We are
373 /// guaranteed that there is a split at Offset.
374 void erase(unsigned Offset, unsigned NumBytes);
375
376 static inline bool classof(const RopePieceBTreeInterior *) { return true; }
377 static inline bool classof(const RopePieceBTreeNode *N) {
378 return !N->isLeaf();
379 }
380 };
381} // end anonymous namespace
382
383/// split - Split the range containing the specified offset so that we are
384/// guaranteed that there is a place to do an insertion at the specified
Chris Lattnerbf268562008-04-14 22:10:58 +0000385/// offset. The offset is relative, so "0" is the start of the node.
386///
387/// If there is no space in this subtree for the extra piece, the extra tree
388/// node is returned and must be inserted into a parent.
389RopePieceBTreeNode *RopePieceBTreeInterior::split(unsigned Offset) {
Chris Lattner5fd3e262008-04-14 17:54:23 +0000390 // Figure out which child to split.
391 if (Offset == 0 || Offset == size())
Chris Lattnerbf268562008-04-14 22:10:58 +0000392 return 0; // If we have an exact offset, we're already split.
Chris Lattner5fd3e262008-04-14 17:54:23 +0000393
394 unsigned ChildOffset = 0;
395 unsigned i = 0;
396 for (; Offset >= ChildOffset+getChild(i)->size(); ++i)
397 ChildOffset += getChild(i)->size();
398
399 // If already split there, we're done.
400 if (ChildOffset == Offset)
Chris Lattnerbf268562008-04-14 22:10:58 +0000401 return 0;
Chris Lattner5fd3e262008-04-14 17:54:23 +0000402
403 // Otherwise, recursively split the child.
Chris Lattnerbf268562008-04-14 22:10:58 +0000404 if (RopePieceBTreeNode *RHS = getChild(i)->split(Offset-ChildOffset))
405 return HandleChildPiece(i, RHS);
406 return 0; // Done!
Chris Lattner5fd3e262008-04-14 17:54:23 +0000407}
408
409/// insert - Insert the specified ropepiece into this tree node at the
410/// specified offset. The offset is relative, so "0" is the start of the
Chris Lattnerbf268562008-04-14 22:10:58 +0000411/// node.
412///
413/// If there is no space in this subtree for the extra piece, the extra tree
414/// node is returned and must be inserted into a parent.
415RopePieceBTreeNode *RopePieceBTreeInterior::insert(unsigned Offset,
416 const RopePiece &R) {
Chris Lattner5fd3e262008-04-14 17:54:23 +0000417 // Find the insertion point. We are guaranteed that there is a split at the
418 // specified offset so find it.
419 unsigned i = 0, e = getNumChildren();
420
421 unsigned ChildOffs = 0;
422 if (Offset == size()) {
423 // Fastpath for a common case. Insert at end of last child.
424 i = e-1;
425 ChildOffs = size()-getChild(i)->size();
426 } else {
427 for (; Offset > ChildOffs+getChild(i)->size(); ++i)
428 ChildOffs += getChild(i)->size();
429 }
430
431 Size += R.size();
432
433 // Insert at the end of this child.
Chris Lattnerbf268562008-04-14 22:10:58 +0000434 if (RopePieceBTreeNode *RHS = getChild(i)->insert(Offset-ChildOffs, R))
435 return HandleChildPiece(i, RHS);
Chris Lattner5fd3e262008-04-14 17:54:23 +0000436
Chris Lattnerbf268562008-04-14 22:10:58 +0000437 return 0;
Chris Lattner5fd3e262008-04-14 17:54:23 +0000438}
439
440/// HandleChildPiece - A child propagated an insertion result up to us.
441/// Insert the new child, and/or propagate the result further up the tree.
Chris Lattnerbf268562008-04-14 22:10:58 +0000442RopePieceBTreeNode *
443RopePieceBTreeInterior::HandleChildPiece(unsigned i, RopePieceBTreeNode *RHS) {
Chris Lattner5fd3e262008-04-14 17:54:23 +0000444 // Otherwise the child propagated a subtree up to us as a new child. See if
445 // we have space for it here.
446 if (!isFull()) {
Chris Lattnerbf268562008-04-14 22:10:58 +0000447 // Insert RHS after child 'i'.
Chris Lattner5fd3e262008-04-14 17:54:23 +0000448 if (i + 1 != getNumChildren())
449 memmove(&Children[i+2], &Children[i+1],
450 (getNumChildren()-i-1)*sizeof(Children[0]));
Chris Lattnerbf268562008-04-14 22:10:58 +0000451 Children[i+1] = RHS;
Chris Lattner5fd3e262008-04-14 17:54:23 +0000452 ++NumChildren;
453 return false;
454 }
455
456 // Okay, this node is full. Split it in half, moving WidthFactor children to
457 // a newly allocated interior node.
458
459 // Create the new node.
460 RopePieceBTreeInterior *NewNode = new RopePieceBTreeInterior();
461
462 // Move over the last 'WidthFactor' values from here to NewNode.
463 memcpy(&NewNode->Children[0], &Children[WidthFactor],
464 WidthFactor*sizeof(Children[0]));
465
466 // Decrease the number of values in the two nodes.
467 NewNode->NumChildren = NumChildren = WidthFactor;
468
469 // Finally, insert the two new children in the side the can (now) hold them.
Chris Lattnerbf268562008-04-14 22:10:58 +0000470 // These insertions can't fail.
Chris Lattner5fd3e262008-04-14 17:54:23 +0000471 if (i < WidthFactor)
Chris Lattnerbf268562008-04-14 22:10:58 +0000472 this->HandleChildPiece(i, RHS);
Chris Lattner5fd3e262008-04-14 17:54:23 +0000473 else
Chris Lattnerbf268562008-04-14 22:10:58 +0000474 NewNode->HandleChildPiece(i-WidthFactor, RHS);
Chris Lattner5fd3e262008-04-14 17:54:23 +0000475
476 // Recompute the two nodes' size.
477 NewNode->FullRecomputeSizeLocally();
478 FullRecomputeSizeLocally();
Chris Lattnerbf268562008-04-14 22:10:58 +0000479 return NewNode;
Chris Lattner5fd3e262008-04-14 17:54:23 +0000480}
481
482/// erase - Remove NumBytes from this node at the specified offset. We are
483/// guaranteed that there is a split at Offset.
484void RopePieceBTreeInterior::erase(unsigned Offset, unsigned NumBytes) {
485 // This will shrink this node by NumBytes.
486 Size -= NumBytes;
487
488 // Find the first child that overlaps with Offset.
489 unsigned i = 0;
490 for (; Offset >= getChild(i)->size(); ++i)
491 Offset -= getChild(i)->size();
492
493 // Propagate the delete request into overlapping children, or completely
494 // delete the children as appropriate.
495 while (NumBytes) {
496 RopePieceBTreeNode *CurChild = getChild(i);
497
498 // If we are deleting something contained entirely in the child, pass on the
499 // request.
500 if (Offset+NumBytes < CurChild->size()) {
501 CurChild->erase(Offset, NumBytes);
502 return;
503 }
504
505 // If this deletion request starts somewhere in the middle of the child, it
506 // must be deleting to the end of the child.
507 if (Offset) {
508 unsigned BytesFromChild = CurChild->size()-Offset;
509 CurChild->erase(Offset, BytesFromChild);
510 NumBytes -= BytesFromChild;
511 ++i;
512 continue;
513 }
514
515 // If the deletion request completely covers the child, delete it and move
516 // the rest down.
517 NumBytes -= CurChild->size();
518 CurChild->Destroy();
519 --NumChildren;
520 if (i+1 != getNumChildren())
521 memmove(&Children[i], &Children[i+1],
522 (getNumChildren()-i)*sizeof(Children[0]));
523 }
524}
525
526//===----------------------------------------------------------------------===//
527// RopePieceBTreeNode Implementation
528//===----------------------------------------------------------------------===//
529
530void RopePieceBTreeNode::Destroy() {
531 if (RopePieceBTreeLeaf *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
532 delete Leaf;
533 else
534 delete cast<RopePieceBTreeInterior>(this);
535}
536
537/// split - Split the range containing the specified offset so that we are
538/// guaranteed that there is a place to do an insertion at the specified
Chris Lattnerbf268562008-04-14 22:10:58 +0000539/// offset. The offset is relative, so "0" is the start of the node.
540///
541/// If there is no space in this subtree for the extra piece, the extra tree
542/// node is returned and must be inserted into a parent.
543RopePieceBTreeNode *RopePieceBTreeNode::split(unsigned Offset) {
Chris Lattner5fd3e262008-04-14 17:54:23 +0000544 assert(Offset <= size() && "Invalid offset to split!");
545 if (RopePieceBTreeLeaf *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
Chris Lattnerbf268562008-04-14 22:10:58 +0000546 return Leaf->split(Offset);
547 return cast<RopePieceBTreeInterior>(this)->split(Offset);
Chris Lattner5fd3e262008-04-14 17:54:23 +0000548}
549
550/// insert - Insert the specified ropepiece into this tree node at the
551/// specified offset. The offset is relative, so "0" is the start of the
552/// node.
Chris Lattnerbf268562008-04-14 22:10:58 +0000553///
554/// If there is no space in this subtree for the extra piece, the extra tree
555/// node is returned and must be inserted into a parent.
556RopePieceBTreeNode *RopePieceBTreeNode::insert(unsigned Offset,
557 const RopePiece &R) {
Chris Lattner5fd3e262008-04-14 17:54:23 +0000558 assert(Offset <= size() && "Invalid offset to insert!");
559 if (RopePieceBTreeLeaf *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
Chris Lattnerbf268562008-04-14 22:10:58 +0000560 return Leaf->insert(Offset, R);
561 return cast<RopePieceBTreeInterior>(this)->insert(Offset, R);
Chris Lattner5fd3e262008-04-14 17:54:23 +0000562}
563
564/// erase - Remove NumBytes from this node at the specified offset. We are
565/// guaranteed that there is a split at Offset.
566void RopePieceBTreeNode::erase(unsigned Offset, unsigned NumBytes) {
567 assert(Offset+NumBytes <= size() && "Invalid offset to erase!");
568 if (RopePieceBTreeLeaf *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
569 return Leaf->erase(Offset, NumBytes);
570 return cast<RopePieceBTreeInterior>(this)->erase(Offset, NumBytes);
571}
572
573
574//===----------------------------------------------------------------------===//
575// RopePieceBTreeIterator Implementation
576//===----------------------------------------------------------------------===//
577
578static const RopePieceBTreeLeaf *getCN(const void *P) {
579 return static_cast<const RopePieceBTreeLeaf*>(P);
580}
581
582// begin iterator.
583RopePieceBTreeIterator::RopePieceBTreeIterator(const void *n) {
584 const RopePieceBTreeNode *N = static_cast<const RopePieceBTreeNode*>(n);
585
586 // Walk down the left side of the tree until we get to a leaf.
587 while (const RopePieceBTreeInterior *IN = dyn_cast<RopePieceBTreeInterior>(N))
588 N = IN->getChild(0);
589
590 // We must have at least one leaf.
591 CurNode = cast<RopePieceBTreeLeaf>(N);
592
593 // If we found a leaf that happens to be empty, skip over it until we get
594 // to something full.
595 while (CurNode && getCN(CurNode)->getNumPieces() == 0)
596 CurNode = getCN(CurNode)->getNextLeafInOrder();
597
598 if (CurNode != 0)
599 CurPiece = &getCN(CurNode)->getPiece(0);
600 else // Empty tree, this is an end() iterator.
601 CurPiece = 0;
602 CurChar = 0;
603}
604
605void RopePieceBTreeIterator::MoveToNextPiece() {
606 if (CurPiece != &getCN(CurNode)->getPiece(getCN(CurNode)->getNumPieces()-1)) {
607 CurChar = 0;
608 ++CurPiece;
609 return;
610 }
611
612 // Find the next non-empty leaf node.
613 do
614 CurNode = getCN(CurNode)->getNextLeafInOrder();
615 while (CurNode && getCN(CurNode)->getNumPieces() == 0);
616
617 if (CurNode != 0)
618 CurPiece = &getCN(CurNode)->getPiece(0);
619 else // Hit end().
620 CurPiece = 0;
621 CurChar = 0;
622}
623
624//===----------------------------------------------------------------------===//
625// RopePieceBTree Implementation
626//===----------------------------------------------------------------------===//
627
628static RopePieceBTreeNode *getRoot(void *P) {
629 return static_cast<RopePieceBTreeNode*>(P);
630}
631
632RopePieceBTree::RopePieceBTree() {
633 Root = new RopePieceBTreeLeaf();
634}
635RopePieceBTree::RopePieceBTree(const RopePieceBTree &RHS) {
636 assert(RHS.empty() && "Can't copy non-empty tree yet");
637 Root = new RopePieceBTreeLeaf();
638}
639RopePieceBTree::~RopePieceBTree() {
640 getRoot(Root)->Destroy();
641}
642
643unsigned RopePieceBTree::size() const {
644 return getRoot(Root)->size();
645}
646
647void RopePieceBTree::clear() {
648 if (RopePieceBTreeLeaf *Leaf = dyn_cast<RopePieceBTreeLeaf>(getRoot(Root)))
649 Leaf->clear();
650 else {
651 getRoot(Root)->Destroy();
652 Root = new RopePieceBTreeLeaf();
653 }
654}
655
656void RopePieceBTree::insert(unsigned Offset, const RopePiece &R) {
Chris Lattner5fd3e262008-04-14 17:54:23 +0000657 // #1. Split at Offset.
Chris Lattnerbf268562008-04-14 22:10:58 +0000658 if (RopePieceBTreeNode *RHS = getRoot(Root)->split(Offset))
659 Root = new RopePieceBTreeInterior(getRoot(Root), RHS);
Chris Lattner5fd3e262008-04-14 17:54:23 +0000660
661 // #2. Do the insertion.
Chris Lattnerbf268562008-04-14 22:10:58 +0000662 if (RopePieceBTreeNode *RHS = getRoot(Root)->insert(Offset, R))
663 Root = new RopePieceBTreeInterior(getRoot(Root), RHS);
Chris Lattner5fd3e262008-04-14 17:54:23 +0000664}
665
666void RopePieceBTree::erase(unsigned Offset, unsigned NumBytes) {
Chris Lattner5fd3e262008-04-14 17:54:23 +0000667 // #1. Split at Offset.
Chris Lattnerbf268562008-04-14 22:10:58 +0000668 if (RopePieceBTreeNode *RHS = getRoot(Root)->split(Offset))
669 Root = new RopePieceBTreeInterior(getRoot(Root), RHS);
Chris Lattner5fd3e262008-04-14 17:54:23 +0000670
671 // #2. Do the erasing.
672 getRoot(Root)->erase(Offset, NumBytes);
673}
Chris Lattner5618d882008-04-14 21:41:00 +0000674
675//===----------------------------------------------------------------------===//
676// RewriteRope Implementation
677//===----------------------------------------------------------------------===//
678
679RopePiece RewriteRope::MakeRopeString(const char *Start, const char *End) {
680 unsigned Len = End-Start;
681
682 // If we have space for this string in the current alloc buffer, use it.
683 if (AllocOffs+Len <= AllocChunkSize) {
684 memcpy(AllocBuffer->Data+AllocOffs, Start, Len);
685 AllocOffs += Len;
686 return RopePiece(AllocBuffer, AllocOffs-Len, AllocOffs);
687 }
688
689 // If we don't have enough room because this specific allocation is huge,
690 // just allocate a new rope piece for it alone.
691 if (Len > AllocChunkSize) {
692 unsigned Size = End-Start+sizeof(RopeRefCountString)-1;
693 RopeRefCountString *Res =
Chris Lattnerbf268562008-04-14 22:10:58 +0000694 reinterpret_cast<RopeRefCountString *>(new char[Size]);
Chris Lattner5618d882008-04-14 21:41:00 +0000695 Res->RefCount = 0;
696 memcpy(Res->Data, Start, End-Start);
697 return RopePiece(Res, 0, End-Start);
698 }
699
700 // Otherwise, this was a small request but we just don't have space for it
701 // Make a new chunk and share it with later allocations.
702
703 // If we had an old allocation, drop our reference to it.
704 if (AllocBuffer && --AllocBuffer->RefCount == 0)
705 delete [] (char*)AllocBuffer;
706
707 unsigned AllocSize = sizeof(RopeRefCountString)-1+AllocChunkSize;
708 AllocBuffer = reinterpret_cast<RopeRefCountString *>(new char[AllocSize]);
709 AllocBuffer->RefCount = 0;
710 memcpy(AllocBuffer->Data, Start, Len);
711 AllocOffs = Len;
712
713 // Start out the new allocation with a refcount of 1, since we have an
714 // internal reference to it.
715 AllocBuffer->addRef();
716 return RopePiece(AllocBuffer, 0, Len);
717}
718
719