#! /usr/bin/env python
#
# Tokens (from "token.h")
#
# This file is automatically generated; please don't muck it up!
#
# To update the symbols in this file, 'cd' to the top directory of
# the python source tree after building the interpreter and run:
#
#    PYTHONPATH=./Lib ./python Lib/token.py
#
# (this path allows the import of string.py and regexmodule.so
# for a site with no installation in place)

#--start constants--
ENDMARKER = 0
NAME = 1
NUMBER = 2
STRING = 3
NEWLINE = 4
INDENT = 5
DEDENT = 6
LPAR = 7
RPAR = 8
LSQB = 9
RSQB = 10
COLON = 11
COMMA = 12
SEMI = 13
PLUS = 14
MINUS = 15
STAR = 16
SLASH = 17
VBAR = 18
AMPER = 19
LESS = 20
GREATER = 21
EQUAL = 22
DOT = 23
PERCENT = 24
BACKQUOTE = 25
LBRACE = 26
RBRACE = 27
EQEQUAL = 28
NOTEQUAL = 29
LESSEQUAL = 30
GREATEREQUAL = 31
TILDE = 32
CIRCUMFLEX = 33
LEFTSHIFT = 34
RIGHTSHIFT = 35
DOUBLESTAR = 36
OP = 37
ERRORTOKEN = 38
N_TOKENS = 39
NT_OFFSET = 256
#--end constants--

names = dir()
tok_name = {}
for name in names:
    number = eval(name)
    if type(number) is type(0):
        tok_name[number] = name
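
# Illustrative example only (not part of the generated constants): tok_name
# maps a token number back to its symbolic name, assuming this module is
# imported as `token`:
#
#     >>> import token
#     >>> token.tok_name[token.NAME]
#     'NAME'
#     >>> token.tok_name[25]
#     'BACKQUOTE'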

def ISTERMINAL(x):
    return x < NT_OFFSET

def ISNONTERMINAL(x):
    return x >= NT_OFFSET

def ISEOF(x):
    return x == ENDMARKER
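
# Illustrative example only: terminal (token) codes sit below NT_OFFSET,
# grammar nonterminal codes at or above it, and ENDMARKER flags end of input.
#
#     >>> token.ISTERMINAL(token.NAME)
#     1
#     >>> token.ISNONTERMINAL(token.NT_OFFSET + 1)
#     1
#     >>> token.ISEOF(token.ENDMARKER)
#     1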


def main():
    import regex
    import string
    import sys
    args = sys.argv[1:]
    inFileName = args and args[0] or "Include/token.h"
    outFileName = "Lib/token.py"
    if len(args) > 1:
        outFileName = args[1]
    try:
        fp = open(inFileName)
    except IOError, err:
        sys.stderr.write("I/O error: %s\n" % str(err))
        sys.exit(1)
    lines = string.splitfields(fp.read(), "\n")
    fp.close()
    # match lines of the form "#define NAME NUMBER" in token.h
    re = regex.compile(
        "#define[ \t][ \t]*\([A-Z][A-Z_]*\)[ \t][ \t]*\([0-9][0-9]*\)",
        regex.casefold)
    tokens = {}
    for line in lines:
        if re.match(line) > -1:
            name, val = re.group(1, 2)
            val = string.atoi(val)
            tokens[val] = name # reverse so we can sort them...
    keys = tokens.keys()
    keys.sort()
    # load the output skeleton from the target:
    try:
        fp = open(outFileName)
    except IOError, err:
        sys.stderr.write("I/O error: %s\n" % str(err))
        sys.exit(2)
    format = string.splitfields(fp.read(), "\n")
    fp.close()
    try:
        start = format.index("#--start constants--") + 1
        end = format.index("#--end constants--")
    except ValueError:
115 sys.stderr.write("target does not contain format markers")
        sys.exit(3)
    lines = []
    for val in keys:
        lines.append("%s = %d" % (tokens[val], val))
    format[start:end] = lines
    try:
        fp = open(outFileName, 'w')
    except IOError, err:
        sys.stderr.write("I/O error: %s\n" % str(err))
        sys.exit(4)
    fp.write(string.joinfields(format, "\n"))
    fp.close()


if __name__ == "__main__":
    main()