Added PEP 3127 support (0b/0B binary and 0o/0O octal integer literals) to tokenize (with tests); added a PEP 3127 entry to NEWS.
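PEP 3127 adds 0b/0B binary literals and a 0o/0O spelling for octal; on the 2.x branch the legacy leading-zero octal form stays valid, which is why the new tests below tokenize both 0o123 and 0123. The Lib/tokenize.py hunk itself is not shown in this excerpt, so the snippet here is only a sketch of the kind of regex extension involved, borrowing the Hexnumber/Octnumber naming style as an assumption rather than quoting the actual patch:

    import re

    def group(*choices):
        # Combine alternative patterns into one alternation.
        return '(' + '|'.join(choices) + ')'

    Hexnumber = r'0[xX][\da-fA-F]+[lL]?'
    Binnumber = r'0[bB][01]+[lL]?'        # new with PEP 3127
    Octnumber = r'0[oO]?[0-7]*[lL]?'      # new 0o123 form plus legacy 0123 and plain 0
    Decnumber = r'[1-9]\d*[lL]?'
    Intnumber = group(Hexnumber, Binnumber, Octnumber, Decnumber)

    int_re = re.compile(Intnumber + r'$')
    for literal in ('0xff', '0b10', '0o123', '0123', '255'):
        assert int_re.match(literal), literal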
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index d6cfb65..cbfafa8 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -4,7 +4,7 @@
>>> import glob, random, sys
The tests can be really simple. Given a small fragment of source
-code, print out a table with thokens. The ENDMARK is omitted for
+code, print out a table with tokens. The ENDMARK is omitted for
brevity.
>>> dump_tokens("1 + 1")
@@ -106,7 +106,7 @@
... "else: print 'Loaded'\\n")
True
-Balancing contunuation
+Balancing continuation
>>> roundtrip("a = (3,4, \\n"
... "5,6)\\n"
@@ -126,6 +126,14 @@
NUMBER '0xff' (1, 0) (1, 4)
OP '<=' (1, 5) (1, 7)
NUMBER '255' (1, 8) (1, 11)
+ >>> dump_tokens("0b10 <= 255")
+ NUMBER '0b10' (1, 0) (1, 4)
+ OP '<=' (1, 5) (1, 7)
+ NUMBER '255' (1, 8) (1, 11)
+ >>> dump_tokens("0o123 <= 0123")
+ NUMBER '0o123' (1, 0) (1, 5)
+ OP '<=' (1, 6) (1, 8)
+ NUMBER '0123' (1, 9) (1, 13)
>>> dump_tokens("01234567 > ~0x15")
NUMBER '01234567' (1, 0) (1, 8)
OP '>' (1, 9) (1, 10)
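The doctests above mirror what the module produces at runtime; dump_tokens is a helper private to test_tokenize.py. A quick way to reproduce the same output outside the test suite (Python 3 syntax shown; tokenize.generate_tokens also exists on 2.6) is:

    import tokenize
    from io import StringIO

    readline = StringIO("0b10 <= 0o123").readline
    for tok_type, tok_string, start, end, _ in tokenize.generate_tokens(readline):
        print(tokenize.tok_name[tok_type], repr(tok_string), start, end)
    # Prints NUMBER '0b10' (1, 0) (1, 4), then OP, NUMBER, NEWLINE and ENDMARKER.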