Fixes #10639: reindent.py should not convert newlines
reindent.py will now use the newline style detected in the original file, and will report an error if mixed newlines are encountered.
diff --git a/Misc/NEWS b/Misc/NEWS
index 662eedc..d91a993 100644
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -310,6 +310,12 @@
"make install" creates symlinks in --prefix bin for the "-32" files in the
framework bin directory like the installer does.
+Tools/Demos
+-----------
+
+- Issue #10639: reindent.py no longer converts newlines and will raise
+ an error if attempting to convert a file with mixed newlines.
+
Tests
-----
diff --git a/Tools/scripts/reindent.py b/Tools/scripts/reindent.py
index bb41520..b18993b 100755
--- a/Tools/scripts/reindent.py
+++ b/Tools/scripts/reindent.py
@@ -35,7 +35,7 @@
The backup file is a copy of the one that is being reindented. The ".bak"
file is generated with shutil.copy(), but some corner cases regarding
-user/group and permissions could leave the backup file more readable that
+user/group and permissions could leave the backup file more readable than
you'd prefer. You can always use the --nobackup option to prevent this.
"""
@@ -109,7 +109,7 @@
if verbose:
print("checking", file, "...", end=' ')
- with open(file, 'rb') as f:
+ with open(file, 'rb') as f:
encoding, _ = tokenize.detect_encoding(f.readline)
try:
with open(file, encoding=encoding) as f:
@@ -118,6 +118,11 @@
errprint("%s: I/O Error: %s" % (file, str(msg)))
return
+ newline = r.newlines
+ if isinstance(newline, tuple):
+ errprint("%s: mixed newlines detected; cannot process file" % file)
+ return
+
if r.run():
if verbose:
print("changed.")
@@ -129,7 +134,7 @@
shutil.copyfile(file, bak)
if verbose:
print("backed up", file, "to", bak)
- with open(file, "w", encoding=encoding) as f:
+ with open(file, "w", encoding=encoding, newline=newline) as f:
r.write(f)
if verbose:
print("wrote new", file)
@@ -177,6 +182,10 @@
# indeed, they're our headache!
self.stats = []
+ # Save the newlines found in the file so they can be used to
+ # create output without mutating the newlines.
+ self.newlines = f.newlines
+
def run(self):
tokens = tokenize.generate_tokens(self.getline)
for _token in tokens: