Add tokenizer support and tests for u'', U"", uR'', Ur"", etc.
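
The hunk below is only the test half of the change; the tokenizer half
presumably lives in Lib/tokenize.py and is not shown here. As a rough
sketch of what that side amounts to (assuming tokenize.py's
group()-based regex layout and table names; not the literal committed
patch): the string regexes grow an optional [uU] ahead of the optional
[rR], and the quote-prefix lookup tables gain every casing of the new
combinations.

    # Sketch only -- assumes the group() helper and table names used by
    # Lib/tokenize.py; the actual patch may differ in detail.
    def group(*choices):
        return '(' + '|'.join(choices) + ')'

    # An optional u/U now precedes the optional r/R in the patterns.
    Triple = group("[uU]?[rR]?'''", '[uU]?[rR]?"""')
    String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
                   r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"')

    # The table that tells the main loop a triple-quoted string has
    # started needs the new prefix spellings as well:
    triple_quoted = {}
    for t in ("'''", '"""', "r'''", 'r"""', "R'''", 'R"""',
              "u'''", 'u"""', "U'''", 'U"""',
              "ur'''", 'ur"""', "Ur'''", 'Ur"""',
              "uR'''", 'uR"""', "UR'''", 'UR"""'):
        triple_quoted[t] = t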
diff --git a/Lib/test/tokenize_tests.py b/Lib/test/tokenize_tests.py
index 4883668..4974671 100644
--- a/Lib/test/tokenize_tests.py
+++ b/Lib/test/tokenize_tests.py
@@ -110,6 +110,20 @@
bar \\ baz
""" + R'''spam
'''
+x = u'abc' + U'ABC'
+y = u"abc" + U"ABC"
+x = ur'abc' + Ur'ABC' + uR'ABC' + UR'ABC'
+y = ur"abc" + Ur"ABC" + uR"ABC" + UR"ABC"
+x = ur'\\' + UR'\\'
+x = ur'\'' + ''
+y = ur'''
+foo bar \\
+baz''' + UR'''
+foo'''
+y = Ur"""foo
+bar \\ baz
+""" + uR'''spam
+'''

# Indentation
if 1:
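
A quick way to eyeball the new behaviour (a sketch, assuming Python 2,
where the ur/Ur/uR/UR spellings are legal, and the public
tokenize.generate_tokens() API):

    import tokenize
    from StringIO import StringIO

    src = "x = ur'abc' + UR'ABC'\n"
    # Each prefixed literal should come back as a single STRING token.
    tokens = tokenize.generate_tokens(StringIO(src).readline)
    for tok_type, tok_str, _, _, _ in tokens:
        print tokenize.tok_name[tok_type], repr(tok_str)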