blob: 1a72d6fc59a32d72b5c2160d890ef6570d40cd35 [file] [log] [blame]
"""Tokenization help for Python programs.

generate_tokens(readline) is a generator that breaks a stream of
text into Python tokens.  It accepts a readline-like method which is called
repeatedly to get the next line of input (or "" for EOF).  It generates
5-tuples with these members:

    the token type (see token.py)
    the token (a string)
    the starting (row, column) indices of the token (a 2-tuple of ints)
    the ending (row, column) indices of the token (a 2-tuple of ints)
    the original line (string)

It is designed to match the working of the Python tokenizer exactly, except
that it produces COMMENT tokens for comments and gives type OP for all
operators.

Older entry points
    tokenize_loop(readline, tokeneater)
    tokenize(readline, tokeneater=printtoken)
are the same, except instead of generating tokens, tokeneater is a callback
function to which the 5 fields described above are passed as 5 arguments,
each time a new token is found."""
Guido van Rossumb51eaa11997-03-07 00:21:55 +000024
Ka-Ping Yee244c5932001-03-01 13:56:40 +000025__author__ = 'Ka-Ping Yee <ping@lfw.org>'
Ka-Ping Yee4f64c132001-03-01 17:11:17 +000026__credits__ = \
Thomas Wouters89f507f2006-12-13 04:49:30 +000027 'GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro, Raymond Hettinger'
Guido van Rossumb51eaa11997-03-07 00:21:55 +000028
Guido van Rossum3b631771997-10-27 20:44:15 +000029import string, re
Guido van Rossumfc6f5331997-03-07 00:21:12 +000030from token import *
Guido van Rossum4d8e8591992-01-01 19:34:47 +000031
Skip Montanaro40fc1602001-03-01 04:27:19 +000032import token
Raymond Hettinger78a7aee2002-11-05 06:06:02 +000033__all__ = [x for x in dir(token) if x[0] != '_'] + ["COMMENT", "tokenize",
Raymond Hettinger68c04532005-06-10 11:05:19 +000034 "generate_tokens", "NL", "untokenize"]
Skip Montanaro40fc1602001-03-01 04:27:19 +000035del token
36
# Extend the standard token set with two pseudo-tokens that the C
# tokenizer never emits: COMMENT for comment text and NL for
# non-logical ("empty") newlines.
COMMENT = N_TOKENS
NL = N_TOKENS + 1
tok_name[COMMENT] = 'COMMENT'
tok_name[NL] = 'NL'
N_TOKENS += 2
Guido van Rossum1aec3231997-04-08 14:24:39 +000042
def group(*choices):
    """Join the alternatives in *choices* into one parenthesized regex group."""
    return '(' + '|'.join(choices) + ')'

def any(*choices):
    """Regex source matching zero or more repetitions of any alternative."""
    return group(*choices) + '*'

def maybe(*choices):
    """Regex source matching at most one occurrence of any alternative."""
    return group(*choices) + '?'
Guido van Rossum4d8e8591992-01-01 19:34:47 +000046
# Regular-expression source strings for each lexical class.  These are
# composed with group()/any()/maybe() above and compiled further below.
Whitespace = r'[ \f\t]*'
Comment = r'#[^\r\n]*'
# Optionally-continued whitespace, ending in an optional comment.
Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name = r'[a-zA-Z_]\w*'

# Integer literals; the optional [lL] suffix is the Python 2 long marker.
Hexnumber = r'0[xX][\da-fA-F]*[lL]?'
Octnumber = r'0[0-7]*[lL]?'
Decnumber = r'[1-9]\d*[lL]?'
Intnumber = group(Hexnumber, Octnumber, Decnumber)
Exponent = r'[eE][-+]?\d+'
Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent)
Expfloat = r'\d+' + Exponent
Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)

# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
# Tail end of " string.
Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
# Tail end of ''' string.
Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
Triple = group("[uU]?[rR]?'''", '[uU]?[rR]?"""')
# Single-line ' or " string.
String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
               r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"')

# Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get
# recognized as two instances of =).
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"!=",
                 r"//=?", r"->",
                 r"[+\-*/%&|^=<>]=?",
                 r"~")

Bracket = '[][(){}]'
Special = group(r'\r?\n', r'\.\.\.', r'[:;.,@]')
Funny = group(Operator, Bracket, Special)

PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken

# First (or only) line of ' or " string: either the complete literal or
# everything up to a backslash-newline continuation.
ContStr = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
                group("'", r'\\\r?\n'),
                r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
                group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n', Comment, Triple)
# PseudoToken is the pattern generate_tokens() actually scans with.
PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
Guido van Rossum1aec3231997-04-08 14:24:39 +000098
# Compile the master patterns once at import time.
tokenprog, pseudoprog, single3prog, double3prog = map(
    re.compile, (Token, PseudoToken, Single3, Double3))
# Map an opening quote (with any u/r prefix combination) to the compiled
# pattern matching the remainder of that string literal.  The bare-prefix
# entries map to None so the endprogs[initial] fallback chain in
# generate_tokens() never raises KeyError.
endprogs = {"'": re.compile(Single), '"': re.compile(Double),
            "'''": single3prog, '"""': double3prog,
            "r'''": single3prog, 'r"""': double3prog,
            "u'''": single3prog, 'u"""': double3prog,
            "ur'''": single3prog, 'ur"""': double3prog,
            "R'''": single3prog, 'R"""': double3prog,
            "U'''": single3prog, 'U"""': double3prog,
            "uR'''": single3prog, 'uR"""': double3prog,
            "Ur'''": single3prog, 'Ur"""': double3prog,
            "UR'''": single3prog, 'UR"""': double3prog,
            'r': None, 'R': None, 'u': None, 'U': None}

# Sets (as dicts, for fast membership tests) of every legal triple- and
# single-quote opener, including prefixed forms.
triple_quoted = {}
for t in ("'''", '"""',
          "r'''", 'r"""', "R'''", 'R"""',
          "u'''", 'u"""', "U'''", 'U"""',
          "ur'''", 'ur"""', "Ur'''", 'Ur"""',
          "uR'''", 'uR"""', "UR'''", 'UR"""'):
    triple_quoted[t] = t
single_quoted = {}
for t in ("'", '"',
          "r'", 'r"', "R'", 'R"',
          "u'", 'u"', "U'", 'U"',
          "ur'", 'ur"', "Ur'", 'Ur"',
          "uR'", 'uR"', "UR'", 'UR"' ):
    single_quoted[t] = t

# Tab stops assumed when converting leading tabs to an indent column.
tabsize = 8
Fred Drake9b8d8012000-08-17 04:45:13 +0000129
class TokenError(Exception):
    """Raised when EOF is reached inside an unterminated string or statement."""


class StopTokenizing(Exception):
    """May be raised by a tokeneater callback to abort tokenize() early."""
Fred Drake9b8d8012000-08-17 04:45:13 +0000133
def printtoken(type, token, start, end, line): # for testing
    """Print one token as '<srow>,<scol>-<erow>,<ecol>:  TYPE  repr(token)'.

    The original signature used Py2-only tuple-parameter unpacking
    (`(srow, scol)`, `(erow, ecol)`), which PEP 3113 removed; the tuples
    are now unpacked in the body.  Callers (tokenize_loop) pass the same
    five positional arguments as before.
    """
    (srow, scol), (erow, ecol) = start, end
    print("%d,%d-%d,%d:\t%s\t%s" %
          (srow, scol, erow, ecol, tok_name[type], repr(token)))
Guido van Rossum4d8e8591992-01-01 19:34:47 +0000137
def tokenize(readline, tokeneater=printtoken):
    """
    Tokenize a stream, reporting each token through a callback.

    The first parameter, readline, must be a callable object which
    provides the same interface as the readline() method of built-in
    file objects; each call should return one line of input as a string.

    The second parameter, tokeneater, must also be a callable object.
    It is invoked once per token with five arguments, corresponding to
    the tuples generated by generate_tokens().  The callback may raise
    StopTokenizing to end tokenization early.
    """
    try:
        tokenize_loop(readline, tokeneater)
    except StopTokenizing:
        pass
155
# backwards compatible interface
def tokenize_loop(readline, tokeneater):
    """Feed each 5-tuple from generate_tokens() to *tokeneater* as arguments."""
    for tok in generate_tokens(readline):
        tokeneater(*tok)
Tim Peters5ca576e2001-06-18 22:08:13 +0000160
class Untokenizer:
    """Rebuild source text from an iterable of tokenize-style tuples.

    prev_row/prev_col track where the previous token ended so that
    add_whitespace() can pad the gap before the next token, keeping the
    output tokenizable.
    """

    def __init__(self):
        self.tokens = []        # accumulated output fragments
        self.prev_row = 1       # row where the previous token ended (1-based)
        self.prev_col = 0       # column where the previous token ended

    def add_whitespace(self, start):
        """Emit filler so that the next token begins at position *start*.

        Bug fix: the original asserted `row <= self.prev_row`, which is
        inverted -- a token's start never precedes the previous token's
        end, and any token beginning on a *later* row (e.g. after a
        backslash line continuation) tripped the assert.  We now require
        row >= prev_row and bridge row gaps with explicit "\\\n"
        continuations so the result still tokenizes.
        """
        row, col = start
        assert row >= self.prev_row
        row_offset = row - self.prev_row
        if row_offset:
            self.tokens.append("\\\n" * row_offset)
            self.prev_col = 0
        col_offset = col - self.prev_col
        if col_offset:
            self.tokens.append(" " * col_offset)

    def untokenize(self, iterable):
        """Return source text for *iterable* of 5-tuples; fall back to
        compat() as soon as a bare (type, string) 2-tuple is seen."""
        for t in iterable:
            if len(t) == 2:
                self.compat(t, iterable)
                break
            tok_type, token, start, end, line = t
            self.add_whitespace(start)
            self.tokens.append(token)
            self.prev_row, self.prev_col = end
            if tok_type in (NEWLINE, NL):
                # The next token starts on a fresh line.
                self.prev_row += 1
                self.prev_col = 0
        return "".join(self.tokens)

    def compat(self, token, iterable):
        """Best-effort spacing when only (type, string) pairs are available;
        output is ugly but round-trips through the tokenizer."""
        startline = False
        indents = []
        toks_append = self.tokens.append
        toknum, tokval = token
        if toknum in (NAME, NUMBER):
            tokval += ' '       # keep adjacent words/numbers separated
        if toknum in (NEWLINE, NL):
            startline = True
        for tok in iterable:
            toknum, tokval = tok[:2]

            if toknum in (NAME, NUMBER):
                tokval += ' '

            if toknum == INDENT:
                indents.append(tokval)
                continue
            elif toknum == DEDENT:
                indents.pop()
                continue
            elif toknum in (NEWLINE, NL):
                startline = True
            elif startline and indents:
                # Re-emit the current indentation at the start of a line.
                toks_append(indents[-1])
                startline = False
            toks_append(tokval)
Raymond Hettinger68c04532005-06-10 11:05:19 +0000216
def untokenize(iterable):
    """Transform tokens back into Python source code.

    Each element returned by the iterable must be a token sequence
    with at least two elements, a token number and token value.  If
    only two tokens are passed, the resulting output is poor.

    Round-trip invariant for full input:
        Untokenized source will match input source exactly

    Round-trip invariant for limited input:
        # Output text will tokenize back to the input
        t1 = [tok[:2] for tok in generate_tokens(f.readline)]
        newcode = untokenize(t1)
        readline = iter(newcode.splitlines(1)).__next__
        t2 = [tok[:2] for tok in generate_tokens(readline)]
        assert t1 == t2
    """
    return Untokenizer().untokenize(iterable)
Raymond Hettinger68c04532005-06-10 11:05:19 +0000237
def generate_tokens(readline):
    """
    The generate_tokens() generator requires one argument, readline, which
    must be a callable object which provides the same interface as the
    readline() method of built-in file objects.  Each call to the function
    should return one line of input as a string.  Alternately, readline
    can be a callable function terminating with StopIteration:
        readline = open(myfile).__next__    # Example of alternate readline

    The generator produces 5-tuples with these members: the token type; the
    token string; a 2-tuple (srow, scol) of ints specifying the row and
    column where the token begins in the source; a 2-tuple (erow, ecol) of
    ints specifying the row and column where the token ends in the source;
    and the line on which the token was found.  The line passed is the
    logical line; continuation lines are included.

    Fixes over the previous revision: the Py2-only ``raise X, y``
    statements are written in call form (PEP 3109), and the tab-stop
    arithmetic uses floor division so it stays an int under true division.
    """
    lnum = parenlev = continued = 0
    namechars, numchars = string.ascii_letters + '_', '0123456789'
    contstr, needcont = '', 0       # state for a string spanning lines
    contline = None
    indents = [0]                   # stack of indentation columns

    while 1:                                   # loop over lines in stream
        try:
            line = readline()
        except StopIteration:
            line = ''
        lnum = lnum + 1
        pos, max = 0, len(line)

        if contstr:                            # continued string
            if not line:
                raise TokenError("EOF in multi-line string", strstart)
            endmatch = endprog.match(line)
            if endmatch:
                pos = end = endmatch.end(0)
                yield (STRING, contstr + line[:end],
                       strstart, (lnum, end), contline + line)
                contstr, needcont = '', 0
                contline = None
            elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
                # Single-quoted string not closed and not continued: error.
                yield (ERRORTOKEN, contstr + line,
                       strstart, (lnum, len(line)), contline)
                contstr = ''
                contline = None
                continue
            else:
                contstr = contstr + line
                contline = contline + line
                continue

        elif parenlev == 0 and not continued:  # new statement
            if not line: break
            column = 0
            while pos < max:                   # measure leading whitespace
                if line[pos] == ' ': column = column + 1
                # Floor division: '/' would produce a float column in Py3.
                elif line[pos] == '\t': column = (column//tabsize + 1)*tabsize
                elif line[pos] == '\f': column = 0
                else: break
                pos = pos + 1
            if pos == max: break

            if line[pos] in '#\r\n':           # skip comments or blank lines
                if line[pos] == '#':
                    comment_token = line[pos:].rstrip('\r\n')
                    nl_pos = pos + len(comment_token)
                    yield (COMMENT, comment_token,
                           (lnum, pos), (lnum, pos + len(comment_token)), line)
                    yield (NL, line[nl_pos:],
                           (lnum, nl_pos), (lnum, len(line)), line)
                else:
                    yield ((NL, COMMENT)[line[pos] == '#'], line[pos:],
                           (lnum, pos), (lnum, len(line)), line)
                continue

            if column > indents[-1]:           # count indents or dedents
                indents.append(column)
                yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
            while column < indents[-1]:
                if column not in indents:
                    raise IndentationError(
                        "unindent does not match any outer indentation level",
                        ("<tokenize>", lnum, pos, line))
                indents = indents[:-1]
                yield (DEDENT, '', (lnum, pos), (lnum, pos), line)

        else:                                  # continued statement
            if not line:
                raise TokenError("EOF in multi-line statement", (lnum, 0))
            continued = 0

        while pos < max:
            pseudomatch = pseudoprog.match(line, pos)
            if pseudomatch:                                # scan for tokens
                start, end = pseudomatch.span(1)
                spos, epos, pos = (lnum, start), (lnum, end), end
                token, initial = line[start:end], line[start]

                if (initial in numchars or                 # ordinary number
                    (initial == '.' and token != '.' and token != '...')):
                    yield (NUMBER, token, spos, epos, line)
                elif initial in '\r\n':
                    # Inside brackets a newline is non-logical (NL).
                    yield (NL if parenlev > 0 else NEWLINE,
                           token, spos, epos, line)
                elif initial == '#':
                    assert not token.endswith("\n")
                    yield (COMMENT, token, spos, epos, line)
                elif token in triple_quoted:
                    endprog = endprogs[token]
                    endmatch = endprog.match(line, pos)
                    if endmatch:                           # all on one line
                        pos = endmatch.end(0)
                        token = line[start:pos]
                        yield (STRING, token, spos, (lnum, pos), line)
                    else:
                        strstart = (lnum, start)           # multiple lines
                        contstr = line[start:]
                        contline = line
                        break
                elif initial in single_quoted or \
                    token[:2] in single_quoted or \
                    token[:3] in single_quoted:
                    if token[-1] == '\n':                  # continued string
                        strstart = (lnum, start)
                        # Fall through prefixes until a real end pattern.
                        endprog = (endprogs[initial] or endprogs[token[1]] or
                                   endprogs[token[2]])
                        contstr, needcont = line[start:], 1
                        contline = line
                        break
                    else:                                  # ordinary string
                        yield (STRING, token, spos, epos, line)
                elif initial in namechars:                 # ordinary name
                    yield (NAME, token, spos, epos, line)
                elif initial == '\\':                      # continued stmt
                    continued = 1
                else:
                    if initial in '([{': parenlev = parenlev + 1
                    elif initial in ')]}': parenlev = parenlev - 1
                    yield (OP, token, spos, epos, line)
            else:
                yield (ERRORTOKEN, line[pos],
                       (lnum, pos), (lnum, pos+1), line)
                pos = pos + 1

    for indent in indents[1:]:                 # pop remaining indent levels
        yield (DEDENT, '', (lnum, 0), (lnum, 0), '')
    yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')
Guido van Rossumfc6f5331997-03-07 00:21:12 +0000385
if __name__ == '__main__':                     # testing
    import sys
    # Tokenize a named file when given, otherwise standard input.
    if len(sys.argv) > 1:
        tokenize(open(sys.argv[1]).readline)
    else:
        tokenize(sys.stdin.readline)