Issue #2495: tokenize.untokenize did not insert a space between two consecutive
string literals, so "" "" became """", which is invalid code.
Backport of r61979.
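
For context, a minimal reproduction of the round-trip problem (an illustrative
sketch only, not part of the patch; it assumes Python 2, matching the
iter(...).next usage in the docstring below):

    import tokenize
    from StringIO import StringIO

    source = "x = '' ''\n"
    # Keep only (type, string) pairs; untokenize accepts 2-tuples.
    tokens = [tok[:2] for tok in
              tokenize.generate_tokens(StringIO(source).readline)]

    # Without the fix the two STRING tokens are emitted back to back,
    # yielding four consecutive quotes (invalid code); with the fix a
    # single space separates them again.
    print tokenize.untokenize(tokens)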
diff --git a/Lib/test/output/test_tokenize b/Lib/test/output/test_tokenize
index b78a223..9a6cebe 100644
--- a/Lib/test/output/test_tokenize
+++ b/Lib/test/output/test_tokenize
@@ -656,4 +656,10 @@
177,11-177,15: NAME 'pass'
177,15-177,16: NEWLINE '\n'
178,0-178,1: NL '\n'
-179,0-179,0: ENDMARKER ''
+179,0-179,13: COMMENT '# Issue 2495\n'
+180,0-180,1: NAME 'x'
+180,2-180,3: OP '='
+180,4-180,6: STRING "''"
+180,7-180,9: STRING "''"
+180,9-180,10: NEWLINE '\n'
+181,0-181,0: ENDMARKER ''
diff --git a/Lib/test/tokenize_tests.txt b/Lib/test/tokenize_tests.txt
index 4ef3bf1..c095096 100644
--- a/Lib/test/tokenize_tests.txt
+++ b/Lib/test/tokenize_tests.txt
@@ -176,3 +176,5 @@
@staticmethod
def foo(): pass

+# Issue 2495
+x = '' ''
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index a9be4cf..0db3867 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -171,11 +171,12 @@
        t1 = [tok[:2] for tok in generate_tokens(f.readline)]
        newcode = untokenize(t1)
        readline = iter(newcode.splitlines(1)).next
-        t2 = [tok[:2] for tokin generate_tokens(readline)]
+        t2 = [tok[:2] for tok in generate_tokens(readline)]
        assert t1 == t2
    """
    startline = False
+    prevstring = False
    indents = []
    toks = []
    toks_append = toks.append
@@ -185,6 +186,14 @@
        if toknum in (NAME, NUMBER):
            tokval += ' '

+        # Insert a space between two consecutive strings
+        if toknum == STRING:
+            if prevstring:
+                tokval = ' ' + tokval
+            prevstring = True
+        else:
+            prevstring = False
+
        if toknum == INDENT:
            indents.append(tokval)
            continue
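
The round-trip invariant quoted in the docstring above can be exercised
directly against the new test line; again a hedged Python 2 sketch, not part
of the patch:

    import tokenize
    from StringIO import StringIO

    readline = StringIO("x = '' ''\n").readline
    t1 = [tok[:2] for tok in tokenize.generate_tokens(readline)]
    newcode = tokenize.untokenize(t1)
    readline = iter(newcode.splitlines(1)).next
    t2 = [tok[:2] for tok in tokenize.generate_tokens(readline)]
    # With the space re-inserted between consecutive strings, the
    # regenerated source tokenizes back to the same (type, string) pairs.
    assert t1 == t2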