//===--- RewriteRope.cpp - Rope specialized for rewriter --------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the RewriteRope class, which is a powerful string class.
//
//===----------------------------------------------------------------------===//

#include "clang/Rewrite/RewriteRope.h"
#include "llvm/Support/Casting.h"
#include <algorithm>
using namespace clang;
using llvm::dyn_cast;
using llvm::cast;


//===----------------------------------------------------------------------===//
// InsertResult Class
//===----------------------------------------------------------------------===//

/// This is an adapted B+ Tree, ... erases don't keep the tree balanced.

namespace {
  class RopePieceBTreeNode;
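
  /// InsertResult - Filled in when an insertion cannot be done in place: LHS
  /// and RHS are the two nodes that result from splitting the original node,
  /// and the caller is responsible for installing them in its place.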
  struct InsertResult {
    RopePieceBTreeNode *LHS, *RHS;
  };
} // end anonymous namespace

//===----------------------------------------------------------------------===//
// RopePieceBTreeNode Class
//===----------------------------------------------------------------------===//

namespace {
  class RopePieceBTreeNode {
  protected:
    /// WidthFactor - This controls the number of K/V slots held in the BTree:
    /// how wide it is. Each level of the BTree is guaranteed to have at least
    /// 'WidthFactor' elements in it (either ropepieces or children), (except
    /// the root, which may have less) and may have at most 2*WidthFactor
    /// elements.
    enum { WidthFactor = 8 };

    /// Size - This is the number of bytes of file this node (including any
    /// potential children) covers.
    unsigned Size;

    /// IsLeaf - True if this is an instance of RopePieceBTreeLeaf, false if it
    /// is an instance of RopePieceBTreeInterior.
    bool IsLeaf;

    RopePieceBTreeNode(bool isLeaf) : Size(0), IsLeaf(isLeaf) {}
    ~RopePieceBTreeNode() {}
  public:

    bool isLeaf() const { return IsLeaf; }
    unsigned size() const { return Size; }

    void Destroy();

    /// split - Split the range containing the specified offset so that we are
    /// guaranteed that there is a place to do an insertion at the specified
    /// offset. The offset is relative, so "0" is the start of the node. This
    /// returns true if the insertion could not be done in place, and returns
    /// information in 'Res' about the piece that is percolated up.
    bool split(unsigned Offset, InsertResult *Res);

    /// insert - Insert the specified ropepiece into this tree node at the
    /// specified offset. The offset is relative, so "0" is the start of the
    /// node. This returns true if the insertion could not be done in place,
    /// and returns information in 'Res' about the piece that is percolated up.
    bool insert(unsigned Offset, const RopePiece &R, InsertResult *Res);

    /// erase - Remove NumBytes from this node at the specified offset. We are
    /// guaranteed that there is a split at Offset.
    void erase(unsigned Offset, unsigned NumBytes);

    static inline bool classof(const RopePieceBTreeNode *) { return true; }

  };
} // end anonymous namespace

//===----------------------------------------------------------------------===//
// RopePieceBTreeLeaf Class
//===----------------------------------------------------------------------===//

namespace {
  class RopePieceBTreeLeaf : public RopePieceBTreeNode {
    /// NumPieces - This holds the number of rope pieces currently active in
    /// the Pieces array.
    unsigned char NumPieces;

    /// Pieces - This tracks the file chunks currently in this leaf.
    ///
    RopePiece Pieces[2*WidthFactor];

    /// NextLeaf - This is a pointer to the next leaf in the tree, allowing
    /// efficient in-order forward iteration of the tree without traversal.
    const RopePieceBTreeLeaf *NextLeaf;
  public:
    RopePieceBTreeLeaf() : RopePieceBTreeNode(true), NumPieces(0), NextLeaf(0){}

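    /// isFull - Return true if this leaf already holds the maximum of
    /// 2*WidthFactor rope pieces and cannot take another without splitting.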
    bool isFull() const { return NumPieces == 2*WidthFactor; }

    /// clear - Remove all rope pieces from this leaf.
    void clear() {
      while (NumPieces)
        Pieces[--NumPieces] = RopePiece();
      Size = 0;
    }

    unsigned getNumPieces() const { return NumPieces; }

    const RopePiece &getPiece(unsigned i) const {
      assert(i < getNumPieces() && "Invalid piece ID");
      return Pieces[i];
    }

    const RopePieceBTreeLeaf *getNextLeafInOrder() const { return NextLeaf; }
    void setNextLeafInOrder(const RopePieceBTreeLeaf *NL) { NextLeaf = NL; }

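    /// FullRecomputeSizeLocally - Recompute the Size field of this leaf by
    /// summing the sizes of all of its rope pieces.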
    void FullRecomputeSizeLocally() {
      Size = 0;
      for (unsigned i = 0, e = getNumPieces(); i != e; ++i)
        Size += getPiece(i).size();
    }

    /// split - Split the range containing the specified offset so that we are
    /// guaranteed that there is a place to do an insertion at the specified
    /// offset. The offset is relative, so "0" is the start of the node. This
    /// returns true if the insertion could not be done in place, and returns
    /// information in 'Res' about the piece that is percolated up.
    bool split(unsigned Offset, InsertResult *Res);

    /// insert - Insert the specified ropepiece into this tree node at the
    /// specified offset. The offset is relative, so "0" is the start of the
    /// node. This returns true if the insertion could not be done in place,
    /// and returns information in 'Res' about the piece that is percolated up.
    bool insert(unsigned Offset, const RopePiece &R, InsertResult *Res);


    /// erase - Remove NumBytes from this node at the specified offset. We are
    /// guaranteed that there is a split at Offset.
    void erase(unsigned Offset, unsigned NumBytes);

    static inline bool classof(const RopePieceBTreeLeaf *) { return true; }
    static inline bool classof(const RopePieceBTreeNode *N) {
      return N->isLeaf();
    }
  };
} // end anonymous namespace

/// split - Split the range containing the specified offset so that we are
/// guaranteed that there is a place to do an insertion at the specified
/// offset. The offset is relative, so "0" is the start of the node. This
/// returns true if the insertion could not be done in place, and returns
/// information in 'Res' about the piece that is percolated up.
bool RopePieceBTreeLeaf::split(unsigned Offset, InsertResult *Res) {
  // Find the insertion point. We are guaranteed that there is a split at the
  // specified offset so find it.
  if (Offset == 0 || Offset == size()) {
    // Fastpath for a common case. There is already a splitpoint at the end.
    return false;
  }

  // Find the piece that this offset lands in.
  unsigned PieceOffs = 0;
  unsigned i = 0;
  while (Offset >= PieceOffs+Pieces[i].size()) {
    PieceOffs += Pieces[i].size();
    ++i;
  }

  // If there is already a split point at the specified offset, just return
  // success.
  if (PieceOffs == Offset)
    return false;

  // Otherwise, we need to split piece 'i' at Offset-PieceOffs. Convert Offset
  // to being Piece relative.
  unsigned IntraPieceOffset = Offset-PieceOffs;

  // We do this by shrinking the RopePiece and then doing an insert of the tail.
  RopePiece Tail(Pieces[i].StrData, Pieces[i].StartOffs+IntraPieceOffset,
                 Pieces[i].EndOffs);
  Size -= Pieces[i].size();
  Pieces[i].EndOffs = Pieces[i].StartOffs+IntraPieceOffset;
  Size += Pieces[i].size();

  return insert(Offset, Tail, Res);
}


/// insert - Insert the specified RopePiece into this tree node at the
/// specified offset. The offset is relative, so "0" is the start of the
/// node. This returns true if the insertion could not be done in place, and
/// returns information in 'Res' about the piece that is percolated up.
bool RopePieceBTreeLeaf::insert(unsigned Offset, const RopePiece &R,
                                InsertResult *Res) {
  // If this node is not full, insert the piece.
  if (!isFull()) {
    // Find the insertion point. We are guaranteed that there is a split at the
    // specified offset so find it.
    unsigned i = 0, e = getNumPieces();
    if (Offset == size()) {
      // Fastpath for a common case.
      i = e;
    } else {
      unsigned SlotOffs = 0;
      for (; Offset > SlotOffs; ++i)
        SlotOffs += getPiece(i).size();
      assert(SlotOffs == Offset && "Split didn't occur before insertion!");
    }

    // For an insertion into a non-full leaf node, just insert the value in
    // its sorted position. This requires moving later values over.
    for (; i != e; --e)
      Pieces[e] = Pieces[e-1];
    Pieces[i] = R;
    ++NumPieces;
    Size += R.size();
    return false;
  }

  // Otherwise, this leaf is full: split it in two halves. Since this node is
  // full, it contains 2*WidthFactor values. We move the first 'WidthFactor'
  // values to the LHS child (which we leave in this node) and move the last
  // 'WidthFactor' values into the RHS child.

  // Create the new node.
  RopePieceBTreeLeaf *NewNode = new RopePieceBTreeLeaf();

  // Move over the last 'WidthFactor' values from here to NewNode.
  std::copy(&Pieces[WidthFactor], &Pieces[2*WidthFactor],
            &NewNode->Pieces[0]);
  // Replace old pieces with null RopePieces to drop refcounts.
  std::fill(&Pieces[WidthFactor], &Pieces[2*WidthFactor], RopePiece());

  // Decrease the number of values in the two nodes.
  NewNode->NumPieces = NumPieces = WidthFactor;

  // Recompute the two nodes' size.
  NewNode->FullRecomputeSizeLocally();
  FullRecomputeSizeLocally();

  // Update the list of leaves.
  NewNode->setNextLeafInOrder(this->getNextLeafInOrder());
  this->setNextLeafInOrder(NewNode);

  assert(Res && "No result location specified");
  Res->LHS = this;
  Res->RHS = NewNode;

  if (this->size() >= Offset)
    this->insert(Offset, R, 0 /*can't fail*/);
  else
    NewNode->insert(Offset - this->size(), R, 0 /*can't fail*/);
  return true;
}

/// erase - Remove NumBytes from this node at the specified offset. We are
/// guaranteed that there is a split at Offset.
void RopePieceBTreeLeaf::erase(unsigned Offset, unsigned NumBytes) {
  // Since we are guaranteed that there is a split at Offset, we start by
  // finding the Piece that starts there.
  unsigned PieceOffs = 0;
  unsigned i = 0;
  for (; Offset > PieceOffs; ++i)
    PieceOffs += getPiece(i).size();
  assert(PieceOffs == Offset && "Split didn't occur before erase!");

  unsigned StartPiece = i;

  // Figure out how many pieces completely cover 'NumBytes'. We want to remove
  // all of them.
  for (; Offset+NumBytes > PieceOffs+getPiece(i).size(); ++i)
    PieceOffs += getPiece(i).size();

  // If we exactly include the last one, include it in the region to delete.
  if (Offset+NumBytes == PieceOffs+getPiece(i).size())
    PieceOffs += getPiece(i).size(), ++i;

  // If we completely cover some RopePieces, erase them now.
  if (i != StartPiece) {
    unsigned NumDeleted = i-StartPiece;
    for (; i != getNumPieces(); ++i)
      Pieces[i-NumDeleted] = Pieces[i];

    // Drop references to dead rope pieces.
    std::fill(&Pieces[getNumPieces()-NumDeleted], &Pieces[getNumPieces()],
              RopePiece());
    NumPieces -= NumDeleted;

    unsigned CoverBytes = PieceOffs-Offset;
    NumBytes -= CoverBytes;
    Size -= CoverBytes;
  }

  // If we completely removed some stuff, we could be done.
  if (NumBytes == 0) return;

  // Okay, we may now be erasing part of some Piece. If this is the case, then
  // move the start point of the piece.
  assert(getPiece(StartPiece).size() > NumBytes);
  Pieces[StartPiece].StartOffs += NumBytes;

  // The size of this node just shrank by NumBytes.
  Size -= NumBytes;
}

//===----------------------------------------------------------------------===//
// RopePieceBTreeInterior Class
//===----------------------------------------------------------------------===//

namespace {
  // Holds up to 2*WidthFactor children.
  class RopePieceBTreeInterior : public RopePieceBTreeNode {
    /// NumChildren - This holds the number of children currently active in the
    /// Children array.
    unsigned char NumChildren;
    RopePieceBTreeNode *Children[2*WidthFactor];
  public:
    RopePieceBTreeInterior() : RopePieceBTreeNode(false), NumChildren(0) {}

    RopePieceBTreeInterior(RopePieceBTreeNode *LHS, RopePieceBTreeNode *RHS)
      : RopePieceBTreeNode(false) {
      Children[0] = LHS;
      Children[1] = RHS;
      NumChildren = 2;
      Size = LHS->size() + RHS->size();
    }

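    /// isFull - Return true if this interior node already holds the maximum of
    /// 2*WidthFactor children and cannot take another without splitting.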
    bool isFull() const { return NumChildren == 2*WidthFactor; }

    unsigned getNumChildren() const { return NumChildren; }
    const RopePieceBTreeNode *getChild(unsigned i) const {
      assert(i < NumChildren && "invalid child #");
      return Children[i];
    }
    RopePieceBTreeNode *getChild(unsigned i) {
      assert(i < NumChildren && "invalid child #");
      return Children[i];
    }

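    /// FullRecomputeSizeLocally - Recompute the Size field of this node by
    /// summing the sizes of all of its children.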
    void FullRecomputeSizeLocally() {
      Size = 0;
      for (unsigned i = 0, e = getNumChildren(); i != e; ++i)
        Size += getChild(i)->size();
    }


    /// split - Split the range containing the specified offset so that we are
    /// guaranteed that there is a place to do an insertion at the specified
    /// offset. The offset is relative, so "0" is the start of the node. This
    /// returns true if the insertion could not be done in place, and returns
    /// information in 'Res' about the piece that is percolated up.
    bool split(unsigned Offset, InsertResult *Res);


    /// insert - Insert the specified ropepiece into this tree node at the
    /// specified offset. The offset is relative, so "0" is the start of the
    /// node. This returns true if the insertion could not be done in place,
    /// and returns information in 'Res' about the piece that is percolated up.
    bool insert(unsigned Offset, const RopePiece &R, InsertResult *Res);

    /// HandleChildPiece - A child propagated an insertion result up to us.
    /// Insert the new child, and/or propagate the result further up the tree.
    bool HandleChildPiece(unsigned i, InsertResult &Res);

    /// erase - Remove NumBytes from this node at the specified offset. We are
    /// guaranteed that there is a split at Offset.
    void erase(unsigned Offset, unsigned NumBytes);

    static inline bool classof(const RopePieceBTreeInterior *) { return true; }
    static inline bool classof(const RopePieceBTreeNode *N) {
      return !N->isLeaf();
    }
  };
} // end anonymous namespace

/// split - Split the range containing the specified offset so that we are
/// guaranteed that there is a place to do an insertion at the specified
/// offset. The offset is relative, so "0" is the start of the node. This
/// returns true if the insertion could not be done in place, and returns
/// information in 'Res' about the piece that is percolated up.
bool RopePieceBTreeInterior::split(unsigned Offset, InsertResult *Res) {
  // Figure out which child to split.
  if (Offset == 0 || Offset == size())
    return false; // If we have an exact offset, we're already split.

  unsigned ChildOffset = 0;
  unsigned i = 0;
  for (; Offset >= ChildOffset+getChild(i)->size(); ++i)
    ChildOffset += getChild(i)->size();

  // If already split there, we're done.
  if (ChildOffset == Offset)
    return false;

  // Otherwise, recursively split the child.
  if (getChild(i)->split(Offset-ChildOffset, Res))
    return HandleChildPiece(i, *Res);
  return false; // Done!
}

/// insert - Insert the specified ropepiece into this tree node at the
/// specified offset. The offset is relative, so "0" is the start of the
/// node. This returns true if the insertion could not be done in place, and
/// returns information in 'Res' about the piece that is percolated up.
bool RopePieceBTreeInterior::insert(unsigned Offset, const RopePiece &R,
                                    InsertResult *Res) {
  // Find the insertion point. We are guaranteed that there is a split at the
  // specified offset so find it.
  unsigned i = 0, e = getNumChildren();

  unsigned ChildOffs = 0;
  if (Offset == size()) {
    // Fastpath for a common case. Insert at end of last child.
    i = e-1;
    ChildOffs = size()-getChild(i)->size();
  } else {
    for (; Offset > ChildOffs+getChild(i)->size(); ++i)
      ChildOffs += getChild(i)->size();
  }

  Size += R.size();

  // Insert at the end of this child.
  if (getChild(i)->insert(Offset-ChildOffs, R, Res))
    return HandleChildPiece(i, *Res);

  return false;
}

/// HandleChildPiece - A child propagated an insertion result up to us.
/// Insert the new child, and/or propagate the result further up the tree.
bool RopePieceBTreeInterior::HandleChildPiece(unsigned i, InsertResult &Res) {
  // The child propagated a subtree up to us as a new child. See if we have
  // space for it here.
  if (!isFull()) {
    // Replace child 'i' with the two children specified in Res.
    if (i + 1 != getNumChildren())
      memmove(&Children[i+2], &Children[i+1],
              (getNumChildren()-i-1)*sizeof(Children[0]));
    Children[i] = Res.LHS;
    Children[i+1] = Res.RHS;
    ++NumChildren;
    return false;
  }

  // Okay, this node is full. Split it in half, moving WidthFactor children to
  // a newly allocated interior node.

  // Create the new node.
  RopePieceBTreeInterior *NewNode = new RopePieceBTreeInterior();

  // Move over the last 'WidthFactor' values from here to NewNode.
  memcpy(&NewNode->Children[0], &Children[WidthFactor],
         WidthFactor*sizeof(Children[0]));

  // Decrease the number of values in the two nodes.
  NewNode->NumChildren = NumChildren = WidthFactor;

  // Finally, insert the two new children in the side that can (now) hold them.
  if (i < WidthFactor)
    this->HandleChildPiece(i, Res);
  else
    NewNode->HandleChildPiece(i-WidthFactor, Res);

  // Recompute the two nodes' size.
  NewNode->FullRecomputeSizeLocally();
  FullRecomputeSizeLocally();

  Res.LHS = this;
  Res.RHS = NewNode;
  return true;
}

/// erase - Remove NumBytes from this node at the specified offset. We are
/// guaranteed that there is a split at Offset.
void RopePieceBTreeInterior::erase(unsigned Offset, unsigned NumBytes) {
  // This will shrink this node by NumBytes.
  Size -= NumBytes;

  // Find the first child that overlaps with Offset.
  unsigned i = 0;
  for (; Offset >= getChild(i)->size(); ++i)
    Offset -= getChild(i)->size();

  // Propagate the delete request into overlapping children, or completely
  // delete the children as appropriate.
  while (NumBytes) {
    RopePieceBTreeNode *CurChild = getChild(i);

    // If we are deleting something contained entirely in the child, pass on
    // the request.
    if (Offset+NumBytes < CurChild->size()) {
      CurChild->erase(Offset, NumBytes);
      return;
    }

    // If this deletion request starts somewhere in the middle of the child, it
    // must be deleting to the end of the child.
    if (Offset) {
      unsigned BytesFromChild = CurChild->size()-Offset;
      CurChild->erase(Offset, BytesFromChild);
      NumBytes -= BytesFromChild;
      ++i;
      continue;
    }

    // If the deletion request completely covers the child, delete it and move
    // the rest down.
    NumBytes -= CurChild->size();
    CurChild->Destroy();
    --NumChildren;
    if (i != getNumChildren())
      memmove(&Children[i], &Children[i+1],
              (getNumChildren()-i)*sizeof(Children[0]));
  }
}

//===----------------------------------------------------------------------===//
// RopePieceBTreeNode Implementation
//===----------------------------------------------------------------------===//

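/// Destroy - Delete this node, dispatching to the destructor of the
/// appropriate subclass (leaf or interior).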
void RopePieceBTreeNode::Destroy() {
  if (RopePieceBTreeLeaf *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
    delete Leaf;
  else
    delete cast<RopePieceBTreeInterior>(this);
}

/// split - Split the range containing the specified offset so that we are
/// guaranteed that there is a place to do an insertion at the specified
/// offset. The offset is relative, so "0" is the start of the node. This
/// returns true if the insertion could not be done in place, and returns
/// information in 'Res' about the piece that is percolated up.
bool RopePieceBTreeNode::split(unsigned Offset, InsertResult *Res) {
  assert(Offset <= size() && "Invalid offset to split!");
  if (RopePieceBTreeLeaf *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
    return Leaf->split(Offset, Res);
  return cast<RopePieceBTreeInterior>(this)->split(Offset, Res);
}

/// insert - Insert the specified ropepiece into this tree node at the
/// specified offset. The offset is relative, so "0" is the start of the
/// node.
bool RopePieceBTreeNode::insert(unsigned Offset, const RopePiece &R,
                                InsertResult *Res) {
  assert(Offset <= size() && "Invalid offset to insert!");
  if (RopePieceBTreeLeaf *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
    return Leaf->insert(Offset, R, Res);
  return cast<RopePieceBTreeInterior>(this)->insert(Offset, R, Res);
}

/// erase - Remove NumBytes from this node at the specified offset. We are
/// guaranteed that there is a split at Offset.
void RopePieceBTreeNode::erase(unsigned Offset, unsigned NumBytes) {
  assert(Offset+NumBytes <= size() && "Invalid offset to erase!");
  if (RopePieceBTreeLeaf *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
    return Leaf->erase(Offset, NumBytes);
  return cast<RopePieceBTreeInterior>(this)->erase(Offset, NumBytes);
}


//===----------------------------------------------------------------------===//
// RopePieceBTreeIterator Implementation
//===----------------------------------------------------------------------===//

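// getCN - Convert the opaque pointer carried by the iterator back to the leaf
// node it refers to.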
static const RopePieceBTreeLeaf *getCN(const void *P) {
  return static_cast<const RopePieceBTreeLeaf*>(P);
}

// begin iterator.
RopePieceBTreeIterator::RopePieceBTreeIterator(const void *n) {
  const RopePieceBTreeNode *N = static_cast<const RopePieceBTreeNode*>(n);

  // Walk down the left side of the tree until we get to a leaf.
  while (const RopePieceBTreeInterior *IN = dyn_cast<RopePieceBTreeInterior>(N))
    N = IN->getChild(0);

  // We must have at least one leaf.
  CurNode = cast<RopePieceBTreeLeaf>(N);

  // If we found a leaf that happens to be empty, skip over it until we get
  // to something full.
  while (CurNode && getCN(CurNode)->getNumPieces() == 0)
    CurNode = getCN(CurNode)->getNextLeafInOrder();

  if (CurNode != 0)
    CurPiece = &getCN(CurNode)->getPiece(0);
  else // Empty tree, this is an end() iterator.
    CurPiece = 0;
  CurChar = 0;
}

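/// MoveToNextPiece - Advance the iterator to the next RopePiece, skipping over
/// any empty leaves. If there are no more pieces, this becomes an end()
/// iterator (CurPiece == 0).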
void RopePieceBTreeIterator::MoveToNextPiece() {
  if (CurPiece != &getCN(CurNode)->getPiece(getCN(CurNode)->getNumPieces()-1)) {
    CurChar = 0;
    ++CurPiece;
    return;
  }

  // Find the next non-empty leaf node.
  do
    CurNode = getCN(CurNode)->getNextLeafInOrder();
  while (CurNode && getCN(CurNode)->getNumPieces() == 0);

  if (CurNode != 0)
    CurPiece = &getCN(CurNode)->getPiece(0);
  else // Hit end().
    CurPiece = 0;
  CurChar = 0;
}

//===----------------------------------------------------------------------===//
// RopePieceBTree Implementation
//===----------------------------------------------------------------------===//

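// getRoot - Convert the opaque root pointer held by the RopePieceBTree back to
// its node type.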
static RopePieceBTreeNode *getRoot(void *P) {
  return static_cast<RopePieceBTreeNode*>(P);
}

RopePieceBTree::RopePieceBTree() {
  Root = new RopePieceBTreeLeaf();
}
RopePieceBTree::RopePieceBTree(const RopePieceBTree &RHS) {
  assert(RHS.empty() && "Can't copy non-empty tree yet");
  Root = new RopePieceBTreeLeaf();
}
RopePieceBTree::~RopePieceBTree() {
  getRoot(Root)->Destroy();
}

unsigned RopePieceBTree::size() const {
  return getRoot(Root)->size();
}

void RopePieceBTree::clear() {
  if (RopePieceBTreeLeaf *Leaf = dyn_cast<RopePieceBTreeLeaf>(getRoot(Root)))
    Leaf->clear();
  else {
    getRoot(Root)->Destroy();
    Root = new RopePieceBTreeLeaf();
  }
}

void RopePieceBTree::insert(unsigned Offset, const RopePiece &R) {
  InsertResult Result;
  // #1. Split at Offset.
  if (getRoot(Root)->split(Offset, &Result))
    Root = new RopePieceBTreeInterior(Result.LHS, Result.RHS);

  // #2. Do the insertion.
  if (getRoot(Root)->insert(Offset, R, &Result))
    Root = new RopePieceBTreeInterior(Result.LHS, Result.RHS);
}

void RopePieceBTree::erase(unsigned Offset, unsigned NumBytes) {
  InsertResult Result;
  // #1. Split at Offset.
  if (getRoot(Root)->split(Offset, &Result))
    Root = new RopePieceBTreeInterior(Result.LHS, Result.RHS);

  // #2. Do the erasing.
  getRoot(Root)->erase(Offset, NumBytes);
}

//===----------------------------------------------------------------------===//
// RewriteRope Implementation
//===----------------------------------------------------------------------===//

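/// MakeRopeString - Copy the byte range [Start,End) into a reference-counted
/// buffer and return a RopePiece that refers to it. Small strings are packed
/// into a shared allocation chunk; large ones get a dedicated buffer.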
RopePiece RewriteRope::MakeRopeString(const char *Start, const char *End) {
  unsigned Len = End-Start;

  // If we have space for this string in the current alloc buffer, use it.
  if (AllocOffs+Len <= AllocChunkSize) {
    memcpy(AllocBuffer->Data+AllocOffs, Start, Len);
    AllocOffs += Len;
    return RopePiece(AllocBuffer, AllocOffs-Len, AllocOffs);
  }

  // If we don't have enough room because this specific allocation is huge,
  // just allocate a new rope piece for it alone.
  if (Len > AllocChunkSize) {
    unsigned Size = End-Start+sizeof(RopeRefCountString)-1;
    RopeRefCountString *Res =
      reinterpret_cast<RopeRefCountString *>(new char[Size]);
    Res->RefCount = 0;
    memcpy(Res->Data, Start, End-Start);
    return RopePiece(Res, 0, End-Start);
  }

  // Otherwise, this was a small request but we just don't have space for it.
  // Make a new chunk and share it with later allocations.

  // If we had an old allocation, drop our reference to it.
  if (AllocBuffer && --AllocBuffer->RefCount == 0)
    delete [] (char*)AllocBuffer;

  unsigned AllocSize = sizeof(RopeRefCountString)-1+AllocChunkSize;
  AllocBuffer = reinterpret_cast<RopeRefCountString *>(new char[AllocSize]);
  AllocBuffer->RefCount = 0;
  memcpy(AllocBuffer->Data, Start, Len);
  AllocOffs = Len;

  // Start out the new allocation with a refcount of 1, since we have an
  // internal reference to it.
  AllocBuffer->addRef();
  return RopePiece(AllocBuffer, 0, Len);
}
