Fix running test_tokenize directly

test_tokenize imports only TestCase and mock from unittest, so the
name unittest itself is never bound and the script-entry block at the
bottom of the file fails with a NameError when the test is run
directly. Import main alongside the other names and call it there so
the file works both under the test runner and as a standalone script.
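A rough sketch of the failure being fixed, assuming the file is run
from a CPython checkout (traceback trimmed; exact output may differ):

    $ python Lib/test/test_tokenize.py
    ...
    NameError: name 'unittest' is not defined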
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index 90438e7..77c0423 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -3,7 +3,7 @@
STRING, ENDMARKER, ENCODING, tok_name, detect_encoding,
open as tokenize_open, Untokenizer)
from io import BytesIO
-from unittest import TestCase, mock
+from unittest import TestCase, mock, main
import os
import token
@@ -1564,4 +1564,4 @@
if __name__ == "__main__":
- unittest.main()
+ main()