blob: cda82ca8eef4287f5204030e5e6679f3700aaab1 [file] [log] [blame]
"""Tokenization help for Python programs.

generate_tokens(readline) is a generator that breaks a stream of
text into Python tokens.  It accepts a readline-like method which is called
repeatedly to get the next line of input (or "" for EOF).  It generates
5-tuples with these members:

    the token type (see token.py)
    the token (a string)
    the starting (row, column) indices of the token (a 2-tuple of ints)
    the ending (row, column) indices of the token (a 2-tuple of ints)
    the original line (string)

It is designed to match the working of the Python tokenizer exactly, except
that it produces COMMENT tokens for comments and gives type OP for all
operators.

Older entry points
    tokenize_loop(readline, tokeneater)
    tokenize(readline, tokeneater=printtoken)
are the same, except instead of generating tokens, tokeneater is a callback
function to which the 5 fields described above are passed as 5 arguments,
each time a new token is found."""
Guido van Rossumb51eaa11997-03-07 00:21:55 +000024
__author__ = 'Ka-Ping Yee <ping@lfw.org>'
__credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '
               'Skip Montanaro, Raymond Hettinger')

import string, re
from token import *

# Re-export every public name from token, plus our own additions.
import token
__all__ = [x for x in dir(token) if x[0] != '_'] + ["COMMENT", "tokenize",
           "generate_tokens", "NL", "untokenize"]
del x                   # the list-comp variable leaks in Python 2; clean it up
del token

# Two extra token types that the C tokenizer does not expose:
# COMMENT for '#' comments, NL for non-logical newlines.
COMMENT = N_TOKENS
tok_name[COMMENT] = 'COMMENT'
NL = N_TOKENS + 1
tok_name[NL] = 'NL'
N_TOKENS += 2
def group(*choices):
    """Join the alternatives into a single parenthesized regex group."""
    return '(%s)' % '|'.join(choices)

def any(*choices):
    """Regex for zero or more repetitions of the grouped alternatives."""
    return group(*choices) + '*'

def maybe(*choices):
    """Regex for an optional occurrence of the grouped alternatives."""
    return group(*choices) + '?'
Guido van Rossum4d8e8591992-01-01 19:34:47 +000047
# ---- Regular expressions describing Python's lexical structure --------------

Whitespace = r'[ \f\t]*'
Comment = r'#[^\r\n]*'
Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name = r'[a-zA-Z_]\w*'

# Integer literals (optional long suffix, Python 2 style).
Hexnumber = r'0[xX][\da-fA-F]*[lL]?'
Octnumber = r'0[0-7]*[lL]?'
Decnumber = r'[1-9]\d*[lL]?'
Intnumber = group(Hexnumber, Octnumber, Decnumber)

# Floating point and imaginary literals.
Exponent = r'[eE][-+]?\d+'
Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent)
Expfloat = r'\d+' + Exponent
Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)

# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
# Tail end of " string.
Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
# Tail end of ''' string.
Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
Triple = group("[uU]?[rR]?'''", '[uU]?[rR]?"""')
# Single-line ' or " string.
String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
               r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"')

# Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get
# recognized as two instances of =).
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"!=",
                 r"//=?", r"->",
                 r"[+\-*/%&|^=<>]=?",
                 r"~")

Bracket = '[][(){}]'
Special = group(r'\r?\n', r'\.\.\.', r'[:;.,@]')
Funny = group(Operator, Bracket, Special)

PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken

# First (or only) line of ' or " string.
ContStr = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
                group("'", r'\\\r?\n'),
                r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
                group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n', Comment, Triple)
PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
Guido van Rossum1aec3231997-04-08 14:24:39 +000099
# Pre-compiled programs for the patterns above, plus lookup tables mapping
# every legal string prefix/quote combination to the regex that finds the
# matching closing quote (None marks a bare prefix with no quote yet).
tokenprog, pseudoprog, single3prog, double3prog = map(
    re.compile, (Token, PseudoToken, Single3, Double3))
endprogs = {"'": re.compile(Single), '"': re.compile(Double),
            "'''": single3prog, '"""': double3prog,
            "r'''": single3prog, 'r"""': double3prog,
            "u'''": single3prog, 'u"""': double3prog,
            "ur'''": single3prog, 'ur"""': double3prog,
            "R'''": single3prog, 'R"""': double3prog,
            "U'''": single3prog, 'U"""': double3prog,
            "uR'''": single3prog, 'uR"""': double3prog,
            "Ur'''": single3prog, 'Ur"""': double3prog,
            "UR'''": single3prog, 'UR"""': double3prog,
            'r': None, 'R': None, 'u': None, 'U': None}

# Every prefix+quote spelling of a triple-quoted opener...
triple_quoted = dict((t, t) for t in
                     ("'''", '"""',
                      "r'''", 'r"""', "R'''", 'R"""',
                      "u'''", 'u"""', "U'''", 'U"""',
                      "ur'''", 'ur"""', "Ur'''", 'Ur"""',
                      "uR'''", 'uR"""', "UR'''", 'UR"""'))
# ...and of a single-quoted opener.
single_quoted = dict((t, t) for t in
                     ("'", '"',
                      "r'", 'r"', "R'", 'R"',
                      "u'", 'u"', "U'", 'U"',
                      "ur'", 'ur"', "Ur'", 'Ur"',
                      "uR'", 'uR"', "UR'", 'UR"'))
128
# Tab stops are every 8 columns, matching the C tokenizer.
tabsize = 8

class TokenError(Exception):
    """Raised when EOF is hit inside an unterminated string or statement."""
    pass

class StopTokenizing(Exception):
    """Raised by a tokeneater callback to abort tokenize() early."""
    pass
Fred Drake9b8d8012000-08-17 04:45:13 +0000134
def printtoken(type, token, srow_scol, erow_ecol, line): # for testing
    """Print one token in "srow,scol-erow,ecol: NAME repr" form.

    Fix: the original signature used Python-2-only tuple-unpacking
    parameters ``(srow, scol), (erow, ecol)`` (removed by PEP 3113);
    the tuples are now unpacked inside the body instead, which is
    behavior-identical for all callers.
    """
    srow, scol = srow_scol
    erow, ecol = erow_ecol
    print("%d,%d-%d,%d:\t%s\t%s" %
          (srow, scol, erow, ecol, tok_name[type], repr(token)))
Guido van Rossum4d8e8591992-01-01 19:34:47 +0000138
def tokenize(readline, tokeneater=printtoken):
    """
    The tokenize() function accepts two parameters: one representing the
    input stream, and one providing an output mechanism for tokenize().

    The first parameter, readline, must be a callable object which
    provides the same interface as the readline() method of built-in
    file objects.  Each call to the function should return one line of
    input as a string.

    The second parameter, tokeneater, must also be a callable object.
    It is called once for each token, with five arguments, corresponding
    to the tuples generated by generate_tokens().
    """
    try:
        tokenize_loop(readline, tokeneater)
    except StopTokenizing:
        # A tokeneater raises StopTokenizing to stop early; not an error.
        pass
156
# backwards compatible interface
def tokenize_loop(readline, tokeneater):
    """Feed every token from generate_tokens(readline) to tokeneater."""
    for token_info in generate_tokens(readline):
        tokeneater(*token_info)
Tim Peters5ca576e2001-06-18 22:08:13 +0000161
class Untokenizer:
    """Rebuild source text from a stream of token tuples.

    Full 5-tuples are re-spaced so each token lands at its recorded
    (row, col) position; bare 2-tuples fall back to compat(), which only
    guarantees the output re-tokenizes to the same token stream.
    """

    def __init__(self):
        # Output fragments collected so far, and the position reached.
        self.tokens = []
        self.prev_row = 1
        self.prev_col = 0

    def add_whitespace(self, start):
        """Emit spaces so the next token begins at column start[1]."""
        row, col = start
        assert row <= self.prev_row
        gap = col - self.prev_col
        if gap:
            self.tokens.append(" " * gap)

    def untokenize(self, iterable):
        """Consume token tuples and return the reconstructed source."""
        for tok in iterable:
            if len(tok) == 2:
                # No position info -- switch to the lossy fallback.
                self.compat(tok, iterable)
                break
            tok_type, text, start, end, line = tok
            self.add_whitespace(start)
            self.tokens.append(text)
            self.prev_row, self.prev_col = end
            if tok_type in (NEWLINE, NL):
                self.prev_row += 1
                self.prev_col = 0
        return "".join(self.tokens)

    def compat(self, token, iterable):
        """Lossy reconstruction from (type, string) pairs.

        NOTE(review): mirrors the historical behavior -- the seed
        token's text itself is never emitted, only the tokens pulled
        from *iterable* after it.
        """
        indents = []
        emit = self.tokens.append
        toknum, tokval = token
        if toknum in (NAME, NUMBER):
            tokval += ' '
        startline = toknum in (NEWLINE, NL)
        for tok in iterable:
            toknum, tokval = tok[:2]
            # Names and numbers need a trailing space so adjacent
            # tokens cannot fuse together when re-tokenized.
            if toknum in (NAME, NUMBER):
                tokval += ' '
            if toknum == INDENT:
                indents.append(tokval)
                continue
            if toknum == DEDENT:
                indents.pop()
                continue
            if toknum in (NEWLINE, NL):
                startline = True
            elif startline and indents:
                emit(indents[-1])
                startline = False
            emit(tokval)
Raymond Hettinger68c04532005-06-10 11:05:19 +0000217
def untokenize(iterable):
    """Transform tokens back into Python source code.

    Each element returned by the iterable must be a token sequence
    with at least two elements, a token number and token value.  If
    only two elements are passed, the resulting output is poor
    (positions are lost).

    Round-trip invariant for full input:
        Untokenized source will match input source exactly.

    Round-trip invariant for limited input:
        # Output text will tokenize back to the input
        t1 = [tok[:2] for tok in generate_tokens(f.readline)]
        newcode = untokenize(t1)
        readline = iter(newcode.splitlines(1)).next
        t2 = [tok[:2] for tok in generate_tokens(readline)]
        assert t1 == t2
    """
    return Untokenizer().untokenize(iterable)
Raymond Hettinger68c04532005-06-10 11:05:19 +0000238
def generate_tokens(readline):
    """
    The generate_tokens() generator requires one argument, readline,
    which must be a callable object providing the same interface as the
    readline() method of built-in file objects.  Each call to the
    function should return one line of input as a string.  Alternately,
    readline can be a callable terminating with StopIteration:
        readline = open(myfile).next    # Example of alternate readline

    The generator produces 5-tuples with these members: the token type;
    the token string; a 2-tuple (srow, scol) of ints specifying the row
    and column where the token begins in the source; a 2-tuple
    (erow, ecol) of ints specifying the row and column where the token
    ends in the source; and the line on which the token was found.  The
    line passed is the logical line; continuation lines are included.
    """
    lnum = parenlev = continued = 0
    namechars, numchars = string.ascii_letters + '_', '0123456789'
    contstr, needcont = '', 0          # pending multi-line string state
    contline = None
    indents = [0]                      # stack of indentation columns

    while True:                                # loop over lines in stream
        try:
            line = readline()
        except StopIteration:
            line = ''
        lnum += 1
        pos, line_len = 0, len(line)

        if contstr:                            # continued string
            if not line:
                raise TokenError("EOF in multi-line string", strstart)
            endmatch = endprog.match(line)
            if endmatch:
                pos = end = endmatch.end(0)
                yield (STRING, contstr + line[:end],
                       strstart, (lnum, end), contline + line)
                contstr, needcont = '', 0
                contline = None
            elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
                # A single-quoted string may only continue across a
                # backslash-newline; anything else is an error token.
                yield (ERRORTOKEN, contstr + line,
                       strstart, (lnum, len(line)), contline)
                contstr = ''
                contline = None
                continue
            else:
                contstr = contstr + line
                contline = contline + line
                continue

        elif parenlev == 0 and not continued:  # new statement
            if not line:
                break
            column = 0
            while pos < line_len:              # measure leading whitespace
                ch = line[pos]
                if ch == ' ':
                    column += 1
                elif ch == '\t':
                    column = (column // tabsize + 1) * tabsize
                elif ch == '\f':
                    column = 0
                else:
                    break
                pos += 1
            if pos == line_len:
                break

            if line[pos] in '#\r\n':           # skip comments or blank lines
                if line[pos] == '#':
                    comment_token = line[pos:].rstrip('\r\n')
                    nl_pos = pos + len(comment_token)
                    yield (COMMENT, comment_token,
                           (lnum, pos), (lnum, pos + len(comment_token)), line)
                    yield (NL, line[nl_pos:],
                           (lnum, nl_pos), (lnum, len(line)), line)
                else:
                    # line[pos] is '\r' or '\n' here: a blank line.
                    yield (NL, line[pos:],
                           (lnum, pos), (lnum, len(line)), line)
                continue

            if column > indents[-1]:           # count indents or dedents
                indents.append(column)
                yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
            while column < indents[-1]:
                if column not in indents:
                    raise IndentationError(
                        "unindent does not match any outer indentation level",
                        ("<tokenize>", lnum, pos, line))
                indents = indents[:-1]
                yield (DEDENT, '', (lnum, pos), (lnum, pos), line)

        else:                                  # continued statement
            if not line:
                raise TokenError("EOF in multi-line statement", (lnum, 0))
            continued = 0

        while pos < line_len:
            pseudomatch = pseudoprog.match(line, pos)
            if not pseudomatch:                # no token matched here
                yield (ERRORTOKEN, line[pos],
                       (lnum, pos), (lnum, pos + 1), line)
                pos += 1
                continue
            # Scan one token.
            start, end = pseudomatch.span(1)
            spos, epos, pos = (lnum, start), (lnum, end), end
            token, initial = line[start:end], line[start]

            if (initial in numchars or         # ordinary number
                    (initial == '.' and token != '.' and token != '...')):
                yield (NUMBER, token, spos, epos, line)
            elif initial in '\r\n':
                yield (NL if parenlev > 0 else NEWLINE,
                       token, spos, epos, line)
            elif initial == '#':
                assert not token.endswith("\n")
                yield (COMMENT, token, spos, epos, line)
            elif token in triple_quoted:
                endprog = endprogs[token]
                endmatch = endprog.match(line, pos)
                if endmatch:                   # all on one line
                    pos = endmatch.end(0)
                    token = line[start:pos]
                    yield (STRING, token, spos, (lnum, pos), line)
                else:
                    strstart = (lnum, start)   # multiple lines
                    contstr = line[start:]
                    contline = line
                    break
            elif (initial in single_quoted or
                  token[:2] in single_quoted or
                  token[:3] in single_quoted):
                if token[-1] == '\n':          # continued string
                    strstart = (lnum, start)
                    endprog = (endprogs[initial] or endprogs[token[1]] or
                               endprogs[token[2]])
                    contstr, needcont = line[start:], 1
                    contline = line
                    break
                else:                          # ordinary string
                    yield (STRING, token, spos, epos, line)
            elif initial in namechars:         # ordinary name
                yield (NAME, token, spos, epos, line)
            elif initial == '\\':              # continued stmt
                continued = 1
            else:
                if initial in '([{':
                    parenlev += 1
                elif initial in ')]}':
                    parenlev -= 1
                yield (OP, token, spos, epos, line)

    for indent in indents[1:]:                 # pop remaining indent levels
        yield (DEDENT, '', (lnum, 0), (lnum, 0), '')
    yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')
Guido van Rossumfc6f5331997-03-07 00:21:12 +0000386
if __name__ == '__main__':                     # testing
    import sys
    if len(sys.argv) > 1:
        tokenize(open(sys.argv[1]).readline)
    else:
        tokenize(sys.stdin.readline)