"""Tokenization help for Python programs.

generate_tokens(readline) is a generator that breaks a stream of
text into Python tokens. It accepts a readline-like method which is called
repeatedly to get the next line of input (or "" for EOF). It generates
5-tuples with these members:

    the token type (see token.py)
    the token (a string)
    the starting (row, column) indices of the token (a 2-tuple of ints)
    the ending (row, column) indices of the token (a 2-tuple of ints)
    the original line (string)

It is designed to match the working of the Python tokenizer exactly, except
that it produces COMMENT tokens for comments and gives type OP for all
operators

Older entry points
    tokenize_loop(readline, tokeneater)
    tokenize(readline, tokeneater=printtoken)
are the same, except instead of generating tokens, tokeneater is a callback
function to which the 5 fields described above are passed as 5 arguments,
each time a new token is found."""
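
# A minimal usage sketch (not part of the original module): tokenizing a small
# snippet held in memory.  The StringIO import and the sample source text are
# assumptions made purely for illustration.
#
#     from StringIO import StringIO
#     for tok_type, tok_string, start, end, line in \
#             generate_tokens(StringIO("x = 1\n").readline):
#         print tok_name[tok_type], repr(tok_string), start, end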

__author__ = 'Ka-Ping Yee <ping@lfw.org>'
__credits__ = \
    'GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro, Raymond Hettinger'

import string, re
from token import *

import token
__all__ = [x for x in dir(token) if x[0] != '_'] + ["COMMENT", "tokenize",
           "generate_tokens", "NL", "untokenize"]
del x
del token

COMMENT = N_TOKENS
tok_name[COMMENT] = 'COMMENT'
NL = N_TOKENS + 1
tok_name[NL] = 'NL'
N_TOKENS += 2

def group(*choices): return '(' + '|'.join(choices) + ')'
def any(*choices): return group(*choices) + '*'
def maybe(*choices): return group(*choices) + '?'
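
# For reference, the helpers above just build alternation patterns; e.g.:
#     group('a', 'b') -> '(a|b)'
#     any('a', 'b')   -> '(a|b)*'
#     maybe('a', 'b') -> '(a|b)?'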

Whitespace = r'[ \f\t]*'
Comment = r'#[^\r\n]*'
Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name = r'[a-zA-Z_]\w*'

Hexnumber = r'0[xX][\da-fA-F]+[lL]?'
Octnumber = r'(0[oO][0-7]+)|(0[0-7]*)[lL]?'
Binnumber = r'0[bB][01]+[lL]?'
Decnumber = r'[1-9]\d*[lL]?'
Intnumber = group(Hexnumber, Binnumber, Octnumber, Decnumber)
Exponent = r'[eE][-+]?\d+'
Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent)
Expfloat = r'\d+' + Exponent
Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)

# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
# Tail end of " string.
Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
# Tail end of ''' string.
Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
Triple = group("[uU]?[rR]?'''", '[uU]?[rR]?"""')
# Single-line ' or " string.
String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
               r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"')

# Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get
# recognized as two instances of =).
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=",
                 r"//=?",
                 r"[+\-*/%&|^=<>]=?",
                 r"~")

Bracket = '[][(){}]'
Special = group(r'\r?\n', r'[:;.,`@]')
Funny = group(Operator, Bracket, Special)

PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken

# First (or only) line of ' or " string.
ContStr = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
                group("'", r'\\\r?\n'),
                r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
                group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n', Comment, Triple)
PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)

tokenprog, pseudoprog, single3prog, double3prog = map(
    re.compile, (Token, PseudoToken, Single3, Double3))
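
# Sketch of how pseudoprog is applied in generate_tokens() below: it skips any
# leading whitespace and then matches a single token, whose extent is taken
# from span(1).  The sample string here is an assumption for illustration.
#
#     m = pseudoprog.match(' x = 1', 0)
#     m.span(1)    # -> (1, 2): the Name token 'x' after one leading space
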
endprogs = {"'": re.compile(Single), '"': re.compile(Double),
            "'''": single3prog, '"""': double3prog,
            "r'''": single3prog, 'r"""': double3prog,
            "u'''": single3prog, 'u"""': double3prog,
            "ur'''": single3prog, 'ur"""': double3prog,
            "R'''": single3prog, 'R"""': double3prog,
            "U'''": single3prog, 'U"""': double3prog,
            "uR'''": single3prog, 'uR"""': double3prog,
            "Ur'''": single3prog, 'Ur"""': double3prog,
            "UR'''": single3prog, 'UR"""': double3prog,
            "b'''": single3prog, 'b"""': double3prog,
            "br'''": single3prog, 'br"""': double3prog,
            "B'''": single3prog, 'B"""': double3prog,
            "bR'''": single3prog, 'bR"""': double3prog,
            "Br'''": single3prog, 'Br"""': double3prog,
            "BR'''": single3prog, 'BR"""': double3prog,
            'r': None, 'R': None, 'u': None, 'U': None,
            'b': None, 'B': None}

triple_quoted = {}
for t in ("'''", '"""',
          "r'''", 'r"""', "R'''", 'R"""',
          "u'''", 'u"""', "U'''", 'U"""',
          "ur'''", 'ur"""', "Ur'''", 'Ur"""',
          "uR'''", 'uR"""', "UR'''", 'UR"""',
          "b'''", 'b"""', "B'''", 'B"""',
          "br'''", 'br"""', "Br'''", 'Br"""',
          "bR'''", 'bR"""', "BR'''", 'BR"""'):
    triple_quoted[t] = t
single_quoted = {}
for t in ("'", '"',
          "r'", 'r"', "R'", 'R"',
          "u'", 'u"', "U'", 'U"',
          "ur'", 'ur"', "Ur'", 'Ur"',
          "uR'", 'uR"', "UR'", 'UR"',
          "b'", 'b"', "B'", 'B"',
          "br'", 'br"', "Br'", 'Br"',
          "bR'", 'bR"', "BR'", 'BR"' ):
    single_quoted[t] = t

tabsize = 8

class TokenError(Exception): pass

class StopTokenizing(Exception): pass

def printtoken(type, token, (srow, scol), (erow, ecol), line): # for testing
    print "%d,%d-%d,%d:\t%s\t%s" % \
        (srow, scol, erow, ecol, tok_name[type], repr(token))

def tokenize(readline, tokeneater=printtoken):
    """
    The tokenize() function accepts two parameters: one representing the
    input stream, and one providing an output mechanism for tokenize().

    The first parameter, readline, must be a callable object which provides
    the same interface as the readline() method of built-in file objects.
    Each call to the function should return one line of input as a string.

    The second parameter, tokeneater, must also be a callable object. It is
    called once for each token, with five arguments, corresponding to the
    tuples generated by generate_tokens().
    """
    try:
        tokenize_loop(readline, tokeneater)
    except StopTokenizing:
        pass

# backwards compatible interface
def tokenize_loop(readline, tokeneater):
    for token_info in generate_tokens(readline):
        tokeneater(*token_info)
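
# Illustrative sketch of the callback-style interface above; the file name and
# the collecting list are assumptions made for the example.
#
#     results = []
#     def eater(type, token, start, end, line):
#         results.append((tok_name[type], token))
#     tokenize(open('example.py').readline, eater)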

class Untokenizer:

    def __init__(self):
        self.tokens = []
        self.prev_row = 1
        self.prev_col = 0

    def add_whitespace(self, start):
        row, col = start
        assert row <= self.prev_row
        col_offset = col - self.prev_col
        if col_offset:
            self.tokens.append(" " * col_offset)

    def untokenize(self, iterable):
        for t in iterable:
            if len(t) == 2:
                self.compat(t, iterable)
                break
            tok_type, token, start, end, line = t
            self.add_whitespace(start)
            self.tokens.append(token)
            self.prev_row, self.prev_col = end
            if tok_type in (NEWLINE, NL):
                self.prev_row += 1
                self.prev_col = 0
        return "".join(self.tokens)

    def compat(self, token, iterable):
        startline = False
        indents = []
        toks_append = self.tokens.append
        toknum, tokval = token
        if toknum in (NAME, NUMBER):
            tokval += ' '
        if toknum in (NEWLINE, NL):
            startline = True
        prevstring = False
        for tok in iterable:
            toknum, tokval = tok[:2]

            if toknum in (NAME, NUMBER):
                tokval += ' '

            # Insert a space between two consecutive strings
            if toknum == STRING:
                if prevstring:
                    tokval = ' ' + tokval
                prevstring = True
            else:
                prevstring = False

            if toknum == INDENT:
                indents.append(tokval)
                continue
            elif toknum == DEDENT:
                indents.pop()
                continue
            elif toknum in (NEWLINE, NL):
                startline = True
            elif startline and indents:
                toks_append(indents[-1])
                startline = False
            toks_append(tokval)

def untokenize(iterable):
    """Transform tokens back into Python source code.

    Each element returned by the iterable must be a token sequence
    with at least two elements, a token number and token value. If
    only two tokens are passed, the resulting output is poor.

    Round-trip invariant for full input:
        Untokenized source will match input source exactly

    Round-trip invariant for limited input:
        # Output text will tokenize back to the input
        t1 = [tok[:2] for tok in generate_tokens(f.readline)]
        newcode = untokenize(t1)
        readline = iter(newcode.splitlines(1)).next
        t2 = [tok[:2] for tok in generate_tokens(readline)]
        assert t1 == t2
    """
    ut = Untokenizer()
    return ut.untokenize(iterable)
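
# Minimal sketch of the full-input round trip promised in the docstring above
# (the file name is an assumption; full 5-tuples are needed for an exact match):
#
#     source = open('example.py').read()
#     readline = iter(source.splitlines(1)).next
#     assert untokenize(generate_tokens(readline)) == source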

def generate_tokens(readline):
    """
    The generate_tokens() generator requires one argument, readline, which
    must be a callable object which provides the same interface as the
    readline() method of built-in file objects. Each call to the function
    should return one line of input as a string. Alternately, readline
    can be a callable function terminating with StopIteration:
        readline = open(myfile).next    # Example of alternate readline

    The generator produces 5-tuples with these members: the token type; the
    token string; a 2-tuple (srow, scol) of ints specifying the row and
    column where the token begins in the source; a 2-tuple (erow, ecol) of
    ints specifying the row and column where the token ends in the source;
    and the line on which the token was found. The line passed is the
    logical line; continuation lines are included.
    """
    lnum = parenlev = continued = 0
    namechars, numchars = string.ascii_letters + '_', '0123456789'
    contstr, needcont = '', 0
    contline = None
    indents = [0]

    while 1:                                   # loop over lines in stream
        try:
            line = readline()
        except StopIteration:
            line = ''
        lnum = lnum + 1
        pos, max = 0, len(line)

        if contstr:                            # continued string
            if not line:
                raise TokenError, ("EOF in multi-line string", strstart)
            endmatch = endprog.match(line)
            if endmatch:
                pos = end = endmatch.end(0)
                yield (STRING, contstr + line[:end],
                       strstart, (lnum, end), contline + line)
                contstr, needcont = '', 0
                contline = None
            elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
                yield (ERRORTOKEN, contstr + line,
                       strstart, (lnum, len(line)), contline)
                contstr = ''
                contline = None
                continue
            else:
                contstr = contstr + line
                contline = contline + line
                continue

        elif parenlev == 0 and not continued:  # new statement
            if not line: break
            column = 0
            while pos < max:                   # measure leading whitespace
                if line[pos] == ' ': column = column + 1
                elif line[pos] == '\t': column = (column/tabsize + 1)*tabsize
                elif line[pos] == '\f': column = 0
                else: break
                pos = pos + 1
            if pos == max: break

            if line[pos] in '#\r\n':           # skip comments or blank lines
                if line[pos] == '#':
                    comment_token = line[pos:].rstrip('\r\n')
                    nl_pos = pos + len(comment_token)
                    yield (COMMENT, comment_token,
                           (lnum, pos), (lnum, pos + len(comment_token)), line)
                    yield (NL, line[nl_pos:],
                           (lnum, nl_pos), (lnum, len(line)), line)
                else:
                    yield ((NL, COMMENT)[line[pos] == '#'], line[pos:],
                           (lnum, pos), (lnum, len(line)), line)
                continue

            if column > indents[-1]:           # count indents or dedents
                indents.append(column)
                yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
            while column < indents[-1]:
                if column not in indents:
                    raise IndentationError(
                        "unindent does not match any outer indentation level",
                        ("<tokenize>", lnum, pos, line))
                indents = indents[:-1]
                yield (DEDENT, '', (lnum, pos), (lnum, pos), line)

        else:                                  # continued statement
            if not line:
                raise TokenError, ("EOF in multi-line statement", (lnum, 0))
            continued = 0

        while pos < max:
            pseudomatch = pseudoprog.match(line, pos)
            if pseudomatch:                    # scan for tokens
                start, end = pseudomatch.span(1)
                spos, epos, pos = (lnum, start), (lnum, end), end
                token, initial = line[start:end], line[start]

                if initial in numchars or \
                   (initial == '.' and token != '.'):      # ordinary number
                    yield (NUMBER, token, spos, epos, line)
                elif initial in '\r\n':
                    yield (NL if parenlev > 0 else NEWLINE,
                           token, spos, epos, line)
                elif initial == '#':
                    assert not token.endswith("\n")
                    yield (COMMENT, token, spos, epos, line)
                elif token in triple_quoted:
                    endprog = endprogs[token]
                    endmatch = endprog.match(line, pos)
                    if endmatch:                           # all on one line
                        pos = endmatch.end(0)
                        token = line[start:pos]
                        yield (STRING, token, spos, (lnum, pos), line)
                    else:
                        strstart = (lnum, start)           # multiple lines
                        contstr = line[start:]
                        contline = line
                        break
                elif initial in single_quoted or \
                    token[:2] in single_quoted or \
                    token[:3] in single_quoted:
                    if token[-1] == '\n':                  # continued string
                        strstart = (lnum, start)
                        endprog = (endprogs[initial] or endprogs[token[1]] or
                                   endprogs[token[2]])
                        contstr, needcont = line[start:], 1
                        contline = line
                        break
                    else:                                  # ordinary string
                        yield (STRING, token, spos, epos, line)
                elif initial in namechars:                 # ordinary name
                    yield (NAME, token, spos, epos, line)
                elif initial == '\\':                      # continued stmt
                    continued = 1
                else:
                    if initial in '([{': parenlev = parenlev + 1
                    elif initial in ')]}': parenlev = parenlev - 1
                    yield (OP, token, spos, epos, line)
            else:
                yield (ERRORTOKEN, line[pos],
                       (lnum, pos), (lnum, pos+1), line)
                pos = pos + 1

    for indent in indents[1:]:                 # pop remaining indent levels
        yield (DEDENT, '', (lnum, 0), (lnum, 0), '')
    yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')

if __name__ == '__main__':                     # testing
    import sys
    if len(sys.argv) > 1: tokenize(open(sys.argv[1]).readline)
    else: tokenize(sys.stdin.readline)