Apply Eli's patch to fix PR4008, with a testcase. Thanks, Eli!


git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@69750 91177308-0d34-0410-b5e6-96231b3b80d8
diff --git a/lib/Lex/TokenConcatenation.cpp b/lib/Lex/TokenConcatenation.cpp
index 92edca7..1563799 100644
--- a/lib/Lex/TokenConcatenation.cpp
+++ b/lib/Lex/TokenConcatenation.cpp
@@ -126,6 +126,14 @@
 /// don't want to track enough to tell "x.." from "...".
 bool TokenConcatenation::AvoidConcat(const Token &PrevTok,
                                      const Token &Tok) const {
+  // First, check to see if the tokens were directly adjacent in the original
+  // source.  If they were, it must be okay to stick them together: if there
+  // were an issue, the tokens would have been lexed differently.
+  if (PrevTok.getLocation().isFileID() && Tok.getLocation().isFileID() &&
+      PrevTok.getLocation().getFileLocWithOffset(PrevTok.getLength()) == 
+        Tok.getLocation())
+    return false;
+  
   tok::TokenKind PrevKind = PrevTok.getKind();
   if (PrevTok.getIdentifierInfo())  // Language keyword or named operator.
     PrevKind = tok::identifier;