//===- Tokens.cpp - collect tokens from preprocessing ---------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
8#include "clang/Tooling/Syntax/Tokens.h"
9
10#include "clang/Basic/Diagnostic.h"
11#include "clang/Basic/IdentifierTable.h"
12#include "clang/Basic/LLVM.h"
13#include "clang/Basic/LangOptions.h"
14#include "clang/Basic/SourceLocation.h"
15#include "clang/Basic/SourceManager.h"
16#include "clang/Basic/TokenKinds.h"
Ilya Biryukov5e69f272019-06-24 21:39:51 +000017#include "clang/Lex/PPCallbacks.h"
Ilya Biryukove7230ea2019-05-22 14:44:45 +000018#include "clang/Lex/Preprocessor.h"
19#include "clang/Lex/Token.h"
20#include "llvm/ADT/ArrayRef.h"
21#include "llvm/ADT/None.h"
22#include "llvm/ADT/Optional.h"
23#include "llvm/ADT/STLExtras.h"
24#include "llvm/Support/Debug.h"
25#include "llvm/Support/ErrorHandling.h"
26#include "llvm/Support/FormatVariadic.h"
27#include "llvm/Support/raw_ostream.h"
28#include <algorithm>
29#include <cassert>
30#include <iterator>
31#include <string>
32#include <utility>
33#include <vector>
34
35using namespace clang;
36using namespace clang::syntax;
37
// Wraps a token produced by the lexer/preprocessor. Annotation tokens carry
// AST payloads and cannot be represented; callers must filter them out.
syntax::Token::Token(const clang::Token &T)
    : Token(T.getLocation(), T.getLength(), T.getKind()) {
  assert(!T.isAnnotation());
}
42
43llvm::StringRef syntax::Token::text(const SourceManager &SM) const {
44 bool Invalid = false;
45 const char *Start = SM.getCharacterData(location(), &Invalid);
46 assert(!Invalid);
47 return llvm::StringRef(Start, length());
48}
49
50FileRange syntax::Token::range(const SourceManager &SM) const {
51 assert(location().isFileID() && "must be a spelled token");
52 FileID File;
53 unsigned StartOffset;
54 std::tie(File, StartOffset) = SM.getDecomposedLoc(location());
55 return FileRange(File, StartOffset, StartOffset + length());
56}
57
58FileRange syntax::Token::range(const SourceManager &SM,
59 const syntax::Token &First,
60 const syntax::Token &Last) {
61 auto F = First.range(SM);
62 auto L = Last.range(SM);
63 assert(F.file() == L.file() && "tokens from different files");
64 assert(F.endOffset() <= L.beginOffset() && "wrong order of tokens");
65 return FileRange(F.file(), F.beginOffset(), L.endOffset());
66}
67
68llvm::raw_ostream &syntax::operator<<(llvm::raw_ostream &OS, const Token &T) {
69 return OS << T.str();
70}
71
// Constructs a range from explicit [BeginOffset, EndOffset) offsets inside
// \p File. The file must be valid and the range non-inverted.
FileRange::FileRange(FileID File, unsigned BeginOffset, unsigned EndOffset)
    : File(File), Begin(BeginOffset), End(EndOffset) {
  assert(File.isValid());
  assert(BeginOffset <= EndOffset);
}
77
78FileRange::FileRange(const SourceManager &SM, SourceLocation BeginLoc,
79 unsigned Length) {
80 assert(BeginLoc.isValid());
81 assert(BeginLoc.isFileID());
82
83 std::tie(File, Begin) = SM.getDecomposedLoc(BeginLoc);
84 End = Begin + Length;
85}
86FileRange::FileRange(const SourceManager &SM, SourceLocation BeginLoc,
87 SourceLocation EndLoc) {
88 assert(BeginLoc.isValid());
89 assert(BeginLoc.isFileID());
90 assert(EndLoc.isValid());
91 assert(EndLoc.isFileID());
92 assert(SM.getFileID(BeginLoc) == SM.getFileID(EndLoc));
93 assert(SM.getFileOffset(BeginLoc) <= SM.getFileOffset(EndLoc));
94
95 std::tie(File, Begin) = SM.getDecomposedLoc(BeginLoc);
96 End = SM.getFileOffset(EndLoc);
97}
98
99llvm::raw_ostream &syntax::operator<<(llvm::raw_ostream &OS,
100 const FileRange &R) {
101 return OS << llvm::formatv("FileRange(file = {0}, offsets = {1}-{2})",
102 R.file().getHashValue(), R.beginOffset(),
103 R.endOffset());
104}
105
106llvm::StringRef FileRange::text(const SourceManager &SM) const {
107 bool Invalid = false;
108 StringRef Text = SM.getBufferData(File, &Invalid);
109 if (Invalid)
110 return "";
111 assert(Begin <= Text.size());
112 assert(End <= Text.size());
113 return Text.substr(Begin, length());
114}
115
// Returns the spelled token corresponding to \p Expanded and, when the token
// was produced by a macro mapping, that mapping (the spelled token returned is
// then the first spelled token of the mapping). The mapping pointer is null
// for tokens that map one-to-one onto the spelled stream.
std::pair<const syntax::Token *, const TokenBuffer::Mapping *>
TokenBuffer::spelledForExpandedToken(const syntax::Token *Expanded) const {
  assert(Expanded);
  // \p Expanded must point into our own expanded stream.
  assert(ExpandedTokens.data() <= Expanded &&
         Expanded < ExpandedTokens.data() + ExpandedTokens.size());

  auto FileIt = Files.find(
      SourceMgr->getFileID(SourceMgr->getExpansionLoc(Expanded->location())));
  assert(FileIt != Files.end() && "no file for an expanded token");

  const MarkedFile &File = FileIt->second;

  unsigned ExpandedIndex = Expanded - ExpandedTokens.data();
  // Find the first mapping that produced tokens after \p Expanded.
  auto It = llvm::bsearch(File.Mappings, [&](const Mapping &M) {
    return ExpandedIndex < M.BeginExpanded;
  });
  // Our token could only be produced by the previous mapping.
  if (It == File.Mappings.begin()) {
    // No previous mapping, no need to modify offsets.
    return {&File.SpelledTokens[ExpandedIndex - File.BeginExpanded], nullptr};
  }
  --It; // 'It' now points to last mapping that started before our token.

  // Check if the token is part of the mapping.
  if (ExpandedIndex < It->EndExpanded)
    return {&File.SpelledTokens[It->BeginSpelled], /*Mapping*/ &*It};

  // Not part of the mapping, use the index from previous mapping to compute the
  // corresponding spelled token.
  return {
      &File.SpelledTokens[It->EndSpelled + (ExpandedIndex - It->EndExpanded)],
      /*Mapping*/ nullptr};
}
150
151llvm::ArrayRef<syntax::Token> TokenBuffer::spelledTokens(FileID FID) const {
152 auto It = Files.find(FID);
153 assert(It != Files.end());
154 return It->second.SpelledTokens;
155}
156
157std::string TokenBuffer::Mapping::str() const {
158 return llvm::formatv("spelled tokens: [{0},{1}), expanded tokens: [{2},{3})",
159 BeginSpelled, EndSpelled, BeginExpanded, EndExpanded);
160}
161
// Maps a contiguous range of expanded tokens back onto the spelled tokens that
// produced them. Returns llvm::None when the range cannot be mapped exactly,
// e.g. it covers only part of a macro expansion or its endpoints land in
// different files.
llvm::Optional<llvm::ArrayRef<syntax::Token>>
TokenBuffer::spelledForExpanded(llvm::ArrayRef<syntax::Token> Expanded) const {
  // Mapping an empty range is ambiguous in case of empty mappings at either end
  // of the range, bail out in that case.
  if (Expanded.empty())
    return llvm::None;

  // FIXME: also allow changes uniquely mapping to macro arguments.

  // Resolve both endpoints to spelled tokens and any macro mappings covering
  // them.
  const syntax::Token *BeginSpelled;
  const Mapping *BeginMapping;
  std::tie(BeginSpelled, BeginMapping) =
      spelledForExpandedToken(&Expanded.front());

  const syntax::Token *LastSpelled;
  const Mapping *LastMapping;
  std::tie(LastSpelled, LastMapping) =
      spelledForExpandedToken(&Expanded.back());

  FileID FID = SourceMgr->getFileID(BeginSpelled->location());
  // FIXME: Handle multi-file changes by trying to map onto a common root.
  if (FID != SourceMgr->getFileID(LastSpelled->location()))
    return llvm::None;

  const MarkedFile &File = Files.find(FID)->second;

  // Do not allow changes that cross macro expansion boundaries.
  unsigned BeginExpanded = Expanded.begin() - ExpandedTokens.data();
  unsigned EndExpanded = Expanded.end() - ExpandedTokens.data();
  if (BeginMapping && BeginMapping->BeginExpanded < BeginExpanded)
    return llvm::None;
  if (LastMapping && EndExpanded < LastMapping->EndExpanded)
    return llvm::None;
  // All is good, return the result. When an endpoint sits inside a mapping,
  // widen it to the mapping's full spelled range.
  return llvm::makeArrayRef(
      BeginMapping ? File.SpelledTokens.data() + BeginMapping->BeginSpelled
                   : BeginSpelled,
      LastMapping ? File.SpelledTokens.data() + LastMapping->EndSpelled
                  : LastSpelled + 1);
}
202
// If \p Spelled is the first spelled token of a recorded mapping (e.g. a
// macro expansion), returns the corresponding spelled and expanded token
// ranges; otherwise returns llvm::None.
llvm::Optional<TokenBuffer::Expansion>
TokenBuffer::expansionStartingAt(const syntax::Token *Spelled) const {
  assert(Spelled);
  assert(Spelled->location().isFileID() && "not a spelled token");
  auto FileIt = Files.find(SourceMgr->getFileID(Spelled->location()));
  assert(FileIt != Files.end() && "file not tracked by token buffer");

  auto &File = FileIt->second;
  // \p Spelled must point into this file's spelled stream.
  assert(File.SpelledTokens.data() <= Spelled &&
         Spelled < (File.SpelledTokens.data() + File.SpelledTokens.size()));

  unsigned SpelledIndex = Spelled - File.SpelledTokens.data();
  // Find the first mapping that starts at or after \p Spelled.
  auto M = llvm::bsearch(File.Mappings, [&](const Mapping &M) {
    return SpelledIndex <= M.BeginSpelled;
  });
  // Only succeed when a mapping starts exactly at \p Spelled.
  if (M == File.Mappings.end() || M->BeginSpelled != SpelledIndex)
    return llvm::None;

  Expansion E;
  E.Spelled = llvm::makeArrayRef(File.SpelledTokens.data() + M->BeginSpelled,
                                 File.SpelledTokens.data() + M->EndSpelled);
  E.Expanded = llvm::makeArrayRef(ExpandedTokens.data() + M->BeginExpanded,
                                  ExpandedTokens.data() + M->EndExpanded);
  return E;
}
228
Ilya Biryukove7230ea2019-05-22 14:44:45 +0000229std::vector<syntax::Token> syntax::tokenize(FileID FID, const SourceManager &SM,
230 const LangOptions &LO) {
231 std::vector<syntax::Token> Tokens;
232 IdentifierTable Identifiers(LO);
233 auto AddToken = [&](clang::Token T) {
234 // Fill the proper token kind for keywords, etc.
235 if (T.getKind() == tok::raw_identifier && !T.needsCleaning() &&
236 !T.hasUCN()) { // FIXME: support needsCleaning and hasUCN cases.
237 clang::IdentifierInfo &II = Identifiers.get(T.getRawIdentifier());
238 T.setIdentifierInfo(&II);
239 T.setKind(II.getTokenID());
240 }
241 Tokens.push_back(syntax::Token(T));
242 };
243
244 Lexer L(FID, SM.getBuffer(FID), SM, LO);
245
246 clang::Token T;
247 while (!L.LexFromRawLexer(T))
248 AddToken(T);
249 // 'eof' is only the last token if the input is null-terminated. Never store
250 // it, for consistency.
251 if (T.getKind() != tok::eof)
252 AddToken(T);
253 return Tokens;
254}
255
/// Records information required to construct mappings for the token buffer that
/// we are collecting.
class TokenCollector::CollectPPExpansions : public PPCallbacks {
public:
  CollectPPExpansions(TokenCollector &C) : Collector(&C) {}

  /// Disabled instance will stop reporting anything to TokenCollector.
  /// This ensures that uses of the preprocessor after TokenCollector::consume()
  /// is called do not access the (possibly invalid) collector instance.
  void disable() { Collector = nullptr; }

  /// Records the spelled source range of each top-level macro expansion,
  /// keyed by the raw encoding of its begin location.
  void MacroExpands(const clang::Token &MacroNameTok, const MacroDefinition &MD,
                    SourceRange Range, const MacroArgs *Args) override {
    if (!Collector)
      return;
    // Only record top-level expansions, not those where:
    //   - the macro use is inside a macro body,
    //   - the macro appears in an argument to another macro.
    if (!MacroNameTok.getLocation().isFileID() ||
        (LastExpansionEnd.isValid() &&
         Collector->PP.getSourceManager().isBeforeInTranslationUnit(
             Range.getBegin(), LastExpansionEnd)))
      return;
    Collector->Expansions[Range.getBegin().getRawEncoding()] = Range.getEnd();
    LastExpansionEnd = Range.getEnd();
  }
  // FIXME: handle directives like #pragma, #include, etc.
private:
  TokenCollector *Collector;
  /// Used to detect recursive macro expansions.
  SourceLocation LastExpansionEnd;
};
288
Ilya Biryukove7230ea2019-05-22 14:44:45 +0000289/// Fills in the TokenBuffer by tracing the run of a preprocessor. The
290/// implementation tracks the tokens, macro expansions and directives coming
291/// from the preprocessor and:
292/// - for each token, figures out if it is a part of an expanded token stream,
293/// spelled token stream or both. Stores the tokens appropriately.
294/// - records mappings from the spelled to expanded token ranges, e.g. for macro
295/// expansions.
296/// FIXME: also properly record:
297/// - #include directives,
298/// - #pragma, #line and other PP directives,
299/// - skipped pp regions,
300/// - ...
301
// Hooks into \p PP to record every expanded token and the source ranges of
// top-level macro expansions. The collector must outlive preprocessing and be
// finalized with consume().
TokenCollector::TokenCollector(Preprocessor &PP) : PP(PP) {
  // Collect the expanded token stream during preprocessing.
  PP.setTokenWatcher([this](const clang::Token &T) {
    if (T.isAnnotation())
      return;
    DEBUG_WITH_TYPE("collect-tokens", llvm::dbgs()
                                          << "Token: "
                                          << syntax::Token(T).dumpForTests(
                                                 this->PP.getSourceManager())
                                          << "\n"

    );
    Expanded.push_back(syntax::Token(T));
  });
  // And locations of macro calls, to properly recover boundaries of those in
  // case of empty expansions.
  auto CB = llvm::make_unique<CollectPPExpansions>(*this);
  this->Collector = CB.get();
  PP.addPPCallbacks(std::move(CB));
}
322
/// Builds mappings and spelled tokens in the TokenBuffer based on the expanded
/// token stream. Walks the expanded stream once, lexing each touched file to
/// obtain its spelled stream, and records a Mapping wherever the two streams
/// diverge (macro expansions, PP directives).
class TokenCollector::Builder {
public:
  Builder(std::vector<syntax::Token> Expanded, PPExpansions CollectedExpansions,
          const SourceManager &SM, const LangOptions &LangOpts)
      : Result(SM), CollectedExpansions(std::move(CollectedExpansions)), SM(SM),
        LangOpts(LangOpts) {
    Result.ExpandedTokens = std::move(Expanded);
  }

  TokenBuffer build() && {
    buildSpelledTokens();

    // Walk over expanded tokens and spelled tokens in parallel, building the
    // mappings between those using source locations.
    // To correctly recover empty macro expansions, we also take locations
    // reported to PPCallbacks::MacroExpands into account as we do not have any
    // expanded tokens with source locations to guide us.

    // The 'eof' token is special, it is not part of spelled token stream. We
    // handle it separately at the end.
    assert(!Result.ExpandedTokens.empty());
    assert(Result.ExpandedTokens.back().kind() == tok::eof);
    for (unsigned I = 0; I < Result.ExpandedTokens.size() - 1; ++I) {
      // (!) I might be updated by the following call.
      processExpandedToken(I);
    }

    // 'eof' not handled in the loop, do it here.
    assert(SM.getMainFileID() ==
           SM.getFileID(Result.ExpandedTokens.back().location()));
    fillGapUntil(Result.Files[SM.getMainFileID()],
                 Result.ExpandedTokens.back().location(),
                 Result.ExpandedTokens.size() - 1);
    Result.Files[SM.getMainFileID()].EndExpanded = Result.ExpandedTokens.size();

    // Some files might have unaccounted spelled tokens at the end, add an empty
    // mapping for those as they did not have expanded counterparts.
    fillGapsAtEndOfFiles();

    return std::move(Result);
  }

private:
  /// Process the next token in an expanded stream and move corresponding
  /// spelled tokens, record any mapping if needed.
  /// (!) \p I will be updated if this had to skip tokens, e.g. for macros.
  void processExpandedToken(unsigned &I) {
    auto L = Result.ExpandedTokens[I].location();
    if (L.isMacroID()) {
      processMacroExpansion(SM.getExpansionRange(L), I);
      return;
    }
    if (L.isFileID()) {
      auto FID = SM.getFileID(L);
      TokenBuffer::MarkedFile &File = Result.Files[FID];

      // Consume any spelled tokens (directives, empty expansions) that appear
      // before this token in the file.
      fillGapUntil(File, L, I);

      // Skip the token.
      assert(File.SpelledTokens[NextSpelled[FID]].location() == L &&
             "no corresponding token in the spelled stream");
      ++NextSpelled[FID];
      return;
    }
  }

  /// Skipped expanded and spelled tokens of a macro expansion that covers \p
  /// SpelledRange. Add a corresponding mapping.
  /// (!) \p I will be the index of the last token in an expansion after this
  /// function returns.
  void processMacroExpansion(CharSourceRange SpelledRange, unsigned &I) {
    auto FID = SM.getFileID(SpelledRange.getBegin());
    assert(FID == SM.getFileID(SpelledRange.getEnd()));
    TokenBuffer::MarkedFile &File = Result.Files[FID];

    fillGapUntil(File, SpelledRange.getBegin(), I);

    // Skip all expanded tokens from the same macro expansion.
    unsigned BeginExpanded = I;
    for (; I + 1 < Result.ExpandedTokens.size(); ++I) {
      auto NextL = Result.ExpandedTokens[I + 1].location();
      if (!NextL.isMacroID() ||
          SM.getExpansionLoc(NextL) != SpelledRange.getBegin())
        break;
    }
    unsigned EndExpanded = I + 1;
    consumeMapping(File, SM.getFileOffset(SpelledRange.getEnd()), BeginExpanded,
                   EndExpanded, NextSpelled[FID]);
  }

  /// Initializes TokenBuffer::Files and fills spelled tokens and expanded
  /// ranges for each of the files.
  void buildSpelledTokens() {
    for (unsigned I = 0; I < Result.ExpandedTokens.size(); ++I) {
      auto FID =
          SM.getFileID(SM.getExpansionLoc(Result.ExpandedTokens[I].location()));
      auto It = Result.Files.try_emplace(FID);
      TokenBuffer::MarkedFile &File = It.first->second;

      File.EndExpanded = I + 1;
      if (!It.second)
        continue; // we have seen this file before.

      // This is the first time we see this file.
      File.BeginExpanded = I;
      File.SpelledTokens = tokenize(FID, SM, LangOpts);
    }
  }

  /// Records a mapping with an empty expanded range, e.g. for a macro that
  /// expands to no tokens.
  void consumeEmptyMapping(TokenBuffer::MarkedFile &File, unsigned EndOffset,
                           unsigned ExpandedIndex, unsigned &SpelledIndex) {
    consumeMapping(File, EndOffset, ExpandedIndex, ExpandedIndex, SpelledIndex);
  }

  /// Consumes spelled tokens that form a macro expansion and adds an entry to
  /// the resulting token buffer.
  /// (!) SpelledIndex is updated in-place.
  void consumeMapping(TokenBuffer::MarkedFile &File, unsigned EndOffset,
                      unsigned BeginExpanded, unsigned EndExpanded,
                      unsigned &SpelledIndex) {
    // We need to record this mapping before continuing.
    unsigned MappingBegin = SpelledIndex;
    ++SpelledIndex;

    // Consume the rest of the spelled tokens; hitting another mapping inside
    // this one would indicate recursion, which we do not expect here.
    bool HitMapping =
        tryConsumeSpelledUntil(File, EndOffset + 1, SpelledIndex).hasValue();
    (void)HitMapping;
    assert(!HitMapping && "recursive macro expansion?");

    TokenBuffer::Mapping M;
    M.BeginExpanded = BeginExpanded;
    M.EndExpanded = EndExpanded;
    M.BeginSpelled = MappingBegin;
    M.EndSpelled = SpelledIndex;

    File.Mappings.push_back(M);
  }

  /// Consumes spelled tokens until location \p L is reached and adds a mapping
  /// covering the consumed tokens. The mapping will point to an empty expanded
  /// range at position \p ExpandedIndex.
  void fillGapUntil(TokenBuffer::MarkedFile &File, SourceLocation L,
                    unsigned ExpandedIndex) {
    assert(L.isFileID());
    FileID FID;
    unsigned Offset;
    std::tie(FID, Offset) = SM.getDecomposedLoc(L);

    unsigned &SpelledIndex = NextSpelled[FID];
    unsigned MappingBegin = SpelledIndex;
    while (true) {
      auto EndLoc = tryConsumeSpelledUntil(File, Offset, SpelledIndex);
      if (SpelledIndex != MappingBegin) {
        TokenBuffer::Mapping M;
        M.BeginSpelled = MappingBegin;
        M.EndSpelled = SpelledIndex;
        M.BeginExpanded = M.EndExpanded = ExpandedIndex;
        File.Mappings.push_back(M);
      }
      if (!EndLoc)
        break;
      // We stopped at a macro-call boundary recorded by CollectPPExpansions;
      // consume its spelled tokens as an empty mapping and keep going.
      consumeEmptyMapping(File, SM.getFileOffset(*EndLoc), ExpandedIndex,
                          SpelledIndex);

      MappingBegin = SpelledIndex;
    }
  };

  /// Consumes spelled tokens until it reaches Offset or a mapping boundary,
  /// i.e. a name of a macro expansion or the start '#' token of a PP directive.
  /// (!) NextSpelled is updated in place.
  ///
  /// returns None if \p Offset was reached, otherwise returns the end location
  /// of a mapping that starts at \p NextSpelled.
  llvm::Optional<SourceLocation>
  tryConsumeSpelledUntil(TokenBuffer::MarkedFile &File, unsigned Offset,
                         unsigned &NextSpelled) {
    for (; NextSpelled < File.SpelledTokens.size(); ++NextSpelled) {
      auto L = File.SpelledTokens[NextSpelled].location();
      if (Offset <= SM.getFileOffset(L))
        return llvm::None; // reached the offset we are looking for.
      auto Mapping = CollectedExpansions.find(L.getRawEncoding());
      if (Mapping != CollectedExpansions.end())
        return Mapping->second; // found a mapping before the offset.
    }
    return llvm::None; // no more tokens, we "reached" the offset.
  }

  /// Adds empty mappings for unconsumed spelled tokens at the end of each file.
  void fillGapsAtEndOfFiles() {
    for (auto &F : Result.Files) {
      if (F.second.SpelledTokens.empty())
        continue;
      fillGapUntil(F.second, F.second.SpelledTokens.back().endLocation(),
                   F.second.EndExpanded);
    }
  }

  TokenBuffer Result;
  /// For each file, a position of the next spelled token we will consume.
  llvm::DenseMap<FileID, unsigned> NextSpelled;
  PPExpansions CollectedExpansions;
  const SourceManager &SM;
  const LangOptions &LangOpts;
};
530
531TokenBuffer TokenCollector::consume() && {
532 PP.setTokenWatcher(nullptr);
Ilya Biryukov5e69f272019-06-24 21:39:51 +0000533 Collector->disable();
534 return Builder(std::move(Expanded), std::move(Expansions),
535 PP.getSourceManager(), PP.getLangOpts())
Ilya Biryukove7230ea2019-05-22 14:44:45 +0000536 .build();
537}
538
539std::string syntax::Token::str() const {
540 return llvm::formatv("Token({0}, length = {1})", tok::getTokenName(kind()),
541 length());
542}
543
544std::string syntax::Token::dumpForTests(const SourceManager &SM) const {
545 return llvm::formatv("{0} {1}", tok::getTokenName(kind()), text(SM));
546}
547
548std::string TokenBuffer::dumpForTests() const {
549 auto PrintToken = [this](const syntax::Token &T) -> std::string {
550 if (T.kind() == tok::eof)
551 return "<eof>";
552 return T.text(*SourceMgr);
553 };
554
555 auto DumpTokens = [this, &PrintToken](llvm::raw_ostream &OS,
556 llvm::ArrayRef<syntax::Token> Tokens) {
Ilya Biryukov26c066d2019-06-19 13:56:36 +0000557 if (Tokens.empty()) {
Ilya Biryukove7230ea2019-05-22 14:44:45 +0000558 OS << "<empty>";
559 return;
560 }
561 OS << Tokens[0].text(*SourceMgr);
562 for (unsigned I = 1; I < Tokens.size(); ++I) {
563 if (Tokens[I].kind() == tok::eof)
564 continue;
565 OS << " " << PrintToken(Tokens[I]);
566 }
567 };
568
569 std::string Dump;
570 llvm::raw_string_ostream OS(Dump);
571
572 OS << "expanded tokens:\n"
573 << " ";
Ilya Biryukov26c066d2019-06-19 13:56:36 +0000574 // (!) we do not show '<eof>'.
575 DumpTokens(OS, llvm::makeArrayRef(ExpandedTokens).drop_back());
Ilya Biryukove7230ea2019-05-22 14:44:45 +0000576 OS << "\n";
577
578 std::vector<FileID> Keys;
579 for (auto F : Files)
580 Keys.push_back(F.first);
581 llvm::sort(Keys);
582
583 for (FileID ID : Keys) {
584 const MarkedFile &File = Files.find(ID)->second;
585 auto *Entry = SourceMgr->getFileEntryForID(ID);
586 if (!Entry)
587 continue; // Skip builtin files.
588 OS << llvm::formatv("file '{0}'\n", Entry->getName())
589 << " spelled tokens:\n"
590 << " ";
591 DumpTokens(OS, File.SpelledTokens);
592 OS << "\n";
593
594 if (File.Mappings.empty()) {
595 OS << " no mappings.\n";
596 continue;
597 }
598 OS << " mappings:\n";
599 for (auto &M : File.Mappings) {
600 OS << llvm::formatv(
601 " ['{0}'_{1}, '{2}'_{3}) => ['{4}'_{5}, '{6}'_{7})\n",
602 PrintToken(File.SpelledTokens[M.BeginSpelled]), M.BeginSpelled,
603 M.EndSpelled == File.SpelledTokens.size()
604 ? "<eof>"
605 : PrintToken(File.SpelledTokens[M.EndSpelled]),
606 M.EndSpelled, PrintToken(ExpandedTokens[M.BeginExpanded]),
607 M.BeginExpanded, PrintToken(ExpandedTokens[M.EndExpanded]),
608 M.EndExpanded);
609 }
610 }
611 return OS.str();
612}