- Issue #11250: Backport fix from the 3.3 branch, so that 2to3 can handle files
  with form feeds. This was ported from the sandbox to the 3.3 branch, but
  didn't make it into 3.2.
- Re-enable lib2to3's test_parser.py tests, though with an expected failure
(see issue 13125).
diff --git a/Lib/lib2to3/patcomp.py b/Lib/lib2to3/patcomp.py
index bb538d5..0a259e9 100644
--- a/Lib/lib2to3/patcomp.py
+++ b/Lib/lib2to3/patcomp.py
@@ -11,6 +11,7 @@
__author__ = "Guido van Rossum <guido@python.org>"
# Python imports
+import io
import os
# Fairly local imports
@@ -32,7 +33,7 @@
def tokenize_wrapper(input):
"""Tokenizes a string suppressing significant whitespace."""
skip = set((token.NEWLINE, token.INDENT, token.DEDENT))
- tokens = tokenize.generate_tokens(driver.generate_lines(input).__next__)
+ tokens = tokenize.generate_tokens(io.StringIO(input).readline)
for quintuple in tokens:
type, value, start, end, line_text = quintuple
if type not in skip:
diff --git a/Lib/lib2to3/pgen2/driver.py b/Lib/lib2to3/pgen2/driver.py
index ee77a13..e7828ff 100644
--- a/Lib/lib2to3/pgen2/driver.py
+++ b/Lib/lib2to3/pgen2/driver.py
@@ -17,6 +17,7 @@
# Python imports
import codecs
+import io
import os
import logging
import sys
@@ -101,18 +102,10 @@
def parse_string(self, text, debug=False):
"""Parse a string and return the syntax tree."""
- tokens = tokenize.generate_tokens(generate_lines(text).__next__)
+ tokens = tokenize.generate_tokens(io.StringIO(text).readline)
return self.parse_tokens(tokens, debug)
-def generate_lines(text):
- """Generator that behaves like readline without using StringIO."""
- for line in text.splitlines(True):
- yield line
- while True:
- yield ""
-
-
def load_grammar(gt="Grammar.txt", gp=None,
save=True, force=False, logger=None):
"""Load the grammar (maybe from a pickle)."""
diff --git a/Lib/lib2to3/tests/test_parser.py b/Lib/lib2to3/tests/test_parser.py
index ce39e41..f32404c 100644
--- a/Lib/lib2to3/tests/test_parser.py
+++ b/Lib/lib2to3/tests/test_parser.py
@@ -14,10 +14,21 @@
# Python imports
import os
+import unittest
# Local imports
from lib2to3.pgen2 import tokenize
from ..pgen2.parse import ParseError
+from lib2to3.pygram import python_symbols as syms
+
+
+class TestDriver(support.TestCase):
+
+ def test_formfeed(self):
+ s = """print 1\n\x0Cprint 2\n"""
+ t = driver.parse_string(s)
+ self.assertEqual(t.children[0].children[0].type, syms.print_stmt)
+ self.assertEqual(t.children[1].children[0].type, syms.print_stmt)
class GrammarTest(support.TestCase):
@@ -147,19 +158,22 @@
"""A cut-down version of pytree_idempotency.py."""
+ # Issue 13125
+ @unittest.expectedFailure
def test_all_project_files(self):
for filepath in support.all_project_files():
with open(filepath, "rb") as fp:
encoding = tokenize.detect_encoding(fp.readline)[0]
self.assertTrue(encoding is not None,
"can't detect encoding for %s" % filepath)
- with open(filepath, "r") as fp:
+ with open(filepath, "r", encoding=encoding) as fp:
source = fp.read()
- source = source.decode(encoding)
- tree = driver.parse_string(source)
+ try:
+ tree = driver.parse_string(source)
+ except ParseError as err:
+ print('ParseError on file', filepath, err)
+ continue
new = str(tree)
- if encoding:
- new = new.encode(encoding)
if diff(filepath, new):
self.fail("Idempotency failed: %s" % filepath)
@@ -202,14 +216,14 @@
self.validate(s)
-def diff(fn, result, encoding):
- f = open("@", "w")
+def diff(fn, result):
try:
- f.write(result.encode(encoding))
- finally:
- f.close()
- try:
+ with open('@', 'w') as f:
+ f.write(str(result))
fn = fn.replace('"', '\\"')
return os.system('diff -u "%s" @' % fn)
finally:
- os.remove("@")
+ try:
+ os.remove("@")
+ except OSError:
+ pass
diff --git a/Lib/test/test_lib2to3.py b/Lib/test/test_lib2to3.py
index 0d6f9a3..1afaf70 100644
--- a/Lib/test/test_lib2to3.py
+++ b/Lib/test/test_lib2to3.py
@@ -1,6 +1,7 @@
# Skipping test_parser and test_all_fixers
# because of running
from lib2to3.tests import (test_fixers, test_pytree, test_util, test_refactor,
+ test_parser,
test_main as test_main_)
import unittest
from test.support import run_unittest
@@ -9,7 +10,7 @@
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers, test_pytree,test_util, test_refactor,
- test_main_):
+ test_parser, test_main_):
tests.addTests(loader.loadTestsFromModule(m))
return tests