blob: 2ce435f585d58509d9097f8aed0f878bc44fbc57 [file] [log] [blame]
Raymond Hettinger68c04532005-06-10 11:05:19 +00001from test.test_support import verbose, findfile, is_resource_enabled
2import os, glob, random
3from tokenize import (tokenize, generate_tokens, untokenize,
4 NUMBER, NAME, OP, STRING)
Guido van Rossum0874f7f1997-10-27 22:15:06 +00005
Guido van Rossum0874f7f1997-10-27 22:15:06 +00006if verbose:
7 print 'starting...'
Tim Peters11cb8132003-05-12 19:29:36 +00008
Tim Peters0ff2ee02003-05-12 19:42:04 +00009f = file(findfile('tokenize_tests' + os.extsep + 'txt'))
Raymond Hettinger68c04532005-06-10 11:05:19 +000010tokenize(f.readline)
Tim Peters11cb8132003-05-12 19:29:36 +000011f.close()
12
Raymond Hettinger68c04532005-06-10 11:05:19 +000013
14
15###### Test roundtrip for untokenize ##########################
16
def test_roundtrip(f):
    """Check that untokenize() round-trips for the source file named f.

    Tokenizing the text produced by untokenize() must yield the same
    (token type, token string) pairs as tokenizing the original file.
    """
    # Read the full five-tuple token stream; close the file even if
    # generate_tokens() raises partway through.
    infile = file(f)          # keep the path in f; don't shadow it
    try:
        fulltok = list(generate_tokens(infile.readline))
    finally:
        infile.close()

    t1 = [tok[:2] for tok in fulltok]
    newtext = untokenize(t1)
    # splitlines(1) keeps the line endings, as a readline callable must.
    readline = iter(newtext.splitlines(1)).next
    t2 = [tok[:2] for tok in generate_tokens(readline)]
    assert t1 == t2
30
31
# First round-trip the reference token-test file itself.
f = findfile('tokenize_tests' + os.extsep + 'txt')
test_roundtrip(f)

# Then round-trip the test suite's own test*.py sources.  When the
# 'compiler' resource is disabled, limit the run to a random sample of 10.
source_dir = os.path.dirname(f) or os.curdir
candidates = glob.glob(source_dir + os.sep + 'test*.py')
if not is_resource_enabled('compiler'):
    candidates = random.sample(candidates, 10)

for source_file in candidates:
    test_roundtrip(source_file)
42
43
44
45###### Test example in the docs ###############################
46
47from decimal import Decimal
48from cStringIO import StringIO
49
def decistmt(s):
    """Substitute Decimals for floats in a string of statements.

    >>> from decimal import Decimal
    >>> s = 'print +21.3e-5*-.1234/81.7'
    >>> decistmt(s)
    "print +Decimal ('21.3e-5')*-Decimal ('.1234')/Decimal ('81.7')"

    >>> exec(s)
    -3.21716034272e-007
    >>> exec(decistmt(s))
    -3.217160342717258261933904529E-7

    """
    tokens = generate_tokens(StringIO(s).readline)  # tokenize the string
    out = []
    for toknum, tokval, _, _, _ in tokens:
        # Anything that is not a float literal passes through unchanged.
        # ('.' distinguishes floats from ints among NUMBER tokens.)
        if toknum != NUMBER or '.' not in tokval:
            out.append((toknum, tokval))
            continue
        # Rewrite the float literal as Decimal('<literal text>').
        out.append((NAME, 'Decimal'))
        out.append((OP, '('))
        out.append((STRING, repr(tokval)))
        out.append((OP, ')'))
    return untokenize(out)
77
# Run the doctests embedded in this module (the decistmt() example above).
import doctest
doctest.testmod()
80
Guido van Rossum0874f7f1997-10-27 22:15:06 +000081if verbose:
82 print 'finished'