blob: 9ea57b6b680dc59dc54192d468eb93a497f439b6 [file] [log] [blame]
"""Tokenization help for Python programs.

generate_tokens(readline) is a generator that breaks a stream of
text into Python tokens.  It accepts a readline-like method which is called
repeatedly to get the next line of input (or "" for EOF).  It generates
5-tuples with these members:

    the token type (see token.py)
    the token (a string)
    the starting (row, column) indices of the token (a 2-tuple of ints)
    the ending (row, column) indices of the token (a 2-tuple of ints)
    the original line (string)

It is designed to match the working of the Python tokenizer exactly, except
that it produces COMMENT tokens for comments and gives type OP for all
operators.

Older entry points
    tokenize_loop(readline, tokeneater)
    tokenize(readline, tokeneater=printtoken)
are the same, except instead of generating tokens, tokeneater is a callback
function to which the 5 fields described above are passed as 5 arguments,
each time a new token is found."""
Guido van Rossumb51eaa11997-03-07 00:21:55 +000024
Ka-Ping Yee244c5932001-03-01 13:56:40 +000025__author__ = 'Ka-Ping Yee <ping@lfw.org>'
Ka-Ping Yee4f64c132001-03-01 17:11:17 +000026__credits__ = \
Thomas Wouters89f507f2006-12-13 04:49:30 +000027 'GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro, Raymond Hettinger'
Guido van Rossumb51eaa11997-03-07 00:21:55 +000028
Guido van Rossum3b631771997-10-27 20:44:15 +000029import string, re
Guido van Rossumfc6f5331997-03-07 00:21:12 +000030from token import *
Guido van Rossum4d8e8591992-01-01 19:34:47 +000031
Skip Montanaro40fc1602001-03-01 04:27:19 +000032import token
Raymond Hettinger78a7aee2002-11-05 06:06:02 +000033__all__ = [x for x in dir(token) if x[0] != '_'] + ["COMMENT", "tokenize",
Raymond Hettinger68c04532005-06-10 11:05:19 +000034 "generate_tokens", "NL", "untokenize"]
Skip Montanaro40fc1602001-03-01 04:27:19 +000035del token
36
# Extend the numeric token namespace from token.py with two pseudo-token
# types that the C tokenizer never emits but this module does.
COMMENT = N_TOKENS              # a comment ('#' through end of line)
tok_name[COMMENT] = 'COMMENT'
NL = N_TOKENS + 1               # non-logical newline (blank line, or a
                                # newline inside brackets)
tok_name[NL] = 'NL'
N_TOKENS += 2
Guido van Rossum1aec3231997-04-08 14:24:39 +000042
def group(*choices):
    """Join regex alternatives into one parenthesized (capturing) group."""
    return '(%s)' % '|'.join(choices)
def any(*choices):
    """A group of alternatives, repeated zero or more times.

    NOTE: shadows the builtin any() within this module.
    """
    return group(*choices) + '*'
def maybe(*choices):
    """An optional group of alternatives."""
    return group(*choices) + '?'
Guido van Rossum4d8e8591992-01-01 19:34:47 +000046
# -- Regular expressions describing Python tokens ---------------------------
# The fragments below are assembled with group()/any()/maybe() defined above.

Whitespace = r'[ \f\t]*'            # horizontal whitespace (no newlines)
Comment = r'#[^\r\n]*'              # '#' through the end of the line
# Whitespace, any number of backslash-newline continuations, then an
# optional comment.
Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name = r'[a-zA-Z_]\w*'              # identifier

# Integer literals: 0x/0b/0o prefixed forms, or decimal (a run of zeros
# alone is also decimal zero).
Hexnumber = r'0[xX][\da-fA-F]*'
Binnumber = r'0[bB][01]*'
Octnumber = r'0[oO][0-7]*'
Decnumber = r'(?:0+|[1-9]\d*)'
Intnumber = group(Hexnumber, Binnumber, Octnumber, Decnumber)
# Floating-point and imaginary literals.
Exponent = r'[eE][-+]?\d+'
Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent)
Expfloat = r'\d+' + Exponent
Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)  # longest forms first

# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
# Tail end of " string.
Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
# Tail end of ''' string.
Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
# Opening of a triple-quoted string, with optional u/r prefixes.
Triple = group("[uU]?[rR]?'''", '[uU]?[rR]?"""')
# Single-line ' or " string.
String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
               r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"')

# Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get
# recognized as two instances of =).
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"!=",
                 r"//=?", r"->",
                 r"[+\-*/%&|^=<>]=?",
                 r"~")

Bracket = '[][(){}]'
# Line endings, the ellipsis token, and single-character punctuation.
Special = group(r'\r?\n', r'\.\.\.', r'[:;.,@]')
Funny = group(Operator, Bracket, Special)

PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken         # one token with leading ignorables

# First (or only) line of ' or " string.
ContStr = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
                group("'", r'\\\r?\n'),
                r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
                group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n', Comment, Triple)
# Pattern actually driven by generate_tokens(); group 1 is the token text.
PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
Guido van Rossum1aec3231997-04-08 14:24:39 +000099
# Compiled patterns: tokenprog/pseudoprog scan for the next token;
# single3prog/double3prog match the remainder of a triple-quoted string.
tokenprog, pseudoprog, single3prog, double3prog = map(
    re.compile, (Token, PseudoToken, Single3, Double3))
# Map each string opener (quote with optional u/r prefixes) to the compiled
# pattern that matches the rest of that string.  Bare one-letter prefix
# keys map to None so generate_tokens() can fall through to the entry for
# the next character of the token (see the `endprogs[initial] or ...`
# chain there).
endprogs = {"'": re.compile(Single), '"': re.compile(Double),
            "'''": single3prog, '"""': double3prog,
            "r'''": single3prog, 'r"""': double3prog,
            "u'''": single3prog, 'u"""': double3prog,
            "ur'''": single3prog, 'ur"""': double3prog,
            "R'''": single3prog, 'R"""': double3prog,
            "U'''": single3prog, 'U"""': double3prog,
            "uR'''": single3prog, 'uR"""': double3prog,
            "Ur'''": single3prog, 'Ur"""': double3prog,
            "UR'''": single3prog, 'UR"""': double3prog,
            'r': None, 'R': None, 'u': None, 'U': None}
Guido van Rossum4d8e8591992-01-01 19:34:47 +0000113
# Every legal triple-quote opener (optional u/r prefixes in either case),
# mapped to itself so the dict doubles as a fast membership set.
triple_quoted = dict(
    (q, q)
    for q in ("'''", '"""',
              "r'''", 'r"""', "R'''", 'R"""',
              "u'''", 'u"""', "U'''", 'U"""',
              "ur'''", 'ur"""', "Ur'''", 'Ur"""',
              "uR'''", 'uR"""', "UR'''", 'UR"""')
)
# Every legal one-quote string opener (optional u/r prefixes in either
# case), mapped to itself so the dict doubles as a fast membership set.
single_quoted = dict(
    (q, q)
    for q in ("'", '"',
              "r'", 'r"', "R'", 'R"',
              "u'", 'u"', "U'", 'U"',
              "ur'", 'ur"', "Ur'", 'Ur"',
              "uR'", 'uR"', "UR'", 'UR"')
)
128
# Tab stops every 8 columns, matching the C tokenizer's default.
tabsize = 8

class TokenError(Exception):
    """Raised when EOF is reached inside an unterminated string or statement."""

class StopTokenizing(Exception):
    """May be raised by a tokeneater callback to abort tokenize() early."""
Fred Drake9b8d8012000-08-17 04:45:13 +0000134
def printtoken(type, token, startrowcol, endrowcol, line): # for testing
    """Dump one token to stdout as 'srow,scol-erow,ecol:  TYPE  repr'."""
    srow, scol = startrowcol
    erow, ecol = endrowcol
    print("%d,%d-%d,%d:\t%s\t%s" %
          (srow, scol, erow, ecol, tok_name[type], repr(token)))
Guido van Rossum4d8e8591992-01-01 19:34:47 +0000139
def tokenize(readline, tokeneater=printtoken):
    """
    Tokenize a stream, reporting every token through a callback.

    readline must behave like the readline() method of a built-in file
    object: each call returns one line of input as a string.

    tokeneater is called once per token with five arguments matching the
    5-tuples produced by generate_tokens().  The default, printtoken,
    writes a human-readable dump to stdout.  A tokeneater may raise
    StopTokenizing to end the run early; that exception is swallowed here.
    """
    try:
        tokenize_loop(readline, tokeneater)
    except StopTokenizing:
        pass
157
# backwards compatible interface
def tokenize_loop(readline, tokeneater):
    """Feed every 5-tuple from generate_tokens(readline) to tokeneater."""
    for tok in generate_tokens(readline):
        tokeneater(*tok)
Tim Peters5ca576e2001-06-18 22:08:13 +0000162
class Untokenizer:
    """Reassemble source text from an iterable of token tuples.

    Full 5-tuples (type, string, start, end, line) reproduce the original
    positions exactly; on the first bare 2-tuple the work is handed to
    compat(), which only approximates the original spacing.
    """

    def __init__(self):
        self.tokens = []        # accumulated source fragments
        self.prev_row = 1       # end position of the last emitted token
        self.prev_col = 0

    def add_whitespace(self, start):
        """Append filler so the next token begins at *start*.

        Raises ValueError if *start* precedes the previous token's end.
        (This replaces the old ``assert row <= self.prev_row``, whose
        comparison was backwards -- it forbade advancing to a later row --
        and which disappeared entirely under ``python -O``.)
        """
        row, col = start
        if row < self.prev_row or row == self.prev_row and col < self.prev_col:
            raise ValueError("start (%d,%d) precedes previous end (%d,%d)" %
                             (row, col, self.prev_row, self.prev_col))
        row_offset = row - self.prev_row
        if row_offset:
            # Bridge skipped rows with escaped newlines so line numbers
            # still agree when the output is re-tokenized.
            self.tokens.append("\\\n" * row_offset)
            self.prev_col = 0
        col_offset = col - self.prev_col
        if col_offset:
            self.tokens.append(" " * col_offset)

    def untokenize(self, iterable):
        """Return source text for *iterable* of 5-tuples; falls back to
        compat() on the first 2-tuple encountered."""
        for t in iterable:
            if len(t) == 2:
                self.compat(t, iterable)
                break
            tok_type, token, start, end, line = t
            self.add_whitespace(start)
            self.tokens.append(token)
            self.prev_row, self.prev_col = end
            if tok_type in (NEWLINE, NL):
                self.prev_row += 1
                self.prev_col = 0
        return "".join(self.tokens)

    def compat(self, token, iterable):
        """Best-effort reconstruction from (type, string) 2-tuples.

        *token* (the first 2-tuple seen) is processed together with the
        rest of *iterable*; the previous version dropped it entirely, so
        the first token never appeared in the output.
        """
        from itertools import chain   # local: module header only imports string, re
        startline = False
        indents = []
        toks_append = self.tokens.append
        for tok in chain([token], iterable):
            toknum, tokval = tok[:2]

            if toknum in (NAME, NUMBER):
                tokval += ' '   # keep adjacent names/numbers from fusing

            if toknum == INDENT:
                indents.append(tokval)
                continue
            elif toknum == DEDENT:
                indents.pop()
                continue
            elif toknum in (NEWLINE, NL):
                startline = True
            elif startline and indents:
                # Re-apply the current indentation at the start of a line.
                toks_append(indents[-1])
                startline = False
            toks_append(tokval)
Raymond Hettinger68c04532005-06-10 11:05:19 +0000218
def untokenize(iterable):
    """Transform tokens back into Python source code.

    Each element returned by the iterable must be a token sequence
    with at least two elements, a token number and token value.  If
    only two tokens are passed, the resulting output is poor.

    Round-trip invariant for full input:
        Untokenized source will match input source exactly

    Round-trip invariant for limited input:
        # Output text will tokenize back to the input
        t1 = [tok[:2] for tok in generate_tokens(f.readline)]
        newcode = untokenize(t1)
        readline = iter(newcode.splitlines(1)).__next__
        t2 = [tok[:2] for tok in generate_tokens(readline)]
        assert t1 == t2
    """
    ut = Untokenizer()
    return ut.untokenize(iterable)
Raymond Hettinger68c04532005-06-10 11:05:19 +0000239
def generate_tokens(readline):
    """
    The generate_tokens() generator requires one argument, readline, which
    must be a callable object which provides the same interface as the
    readline() method of built-in file objects.  Each call to the function
    should return one line of input as a string.  Alternately, readline
    can be a callable function terminating with StopIteration:
        readline = open(myfile).__next__    # Example of alternate readline

    The generator produces 5-tuples with these members: the token type; the
    token string; a 2-tuple (srow, scol) of ints specifying the row and
    column where the token begins in the source; a 2-tuple (erow, ecol) of
    ints specifying the row and column where the token ends in the source;
    and the line on which the token was found.  The line passed is the
    logical line; continuation lines are included.
    """
    lnum = parenlev = continued = 0
    namechars, numchars = string.ascii_letters + '_', '0123456789'
    contstr, needcont = '', 0           # pending multi-line string state
    contline = None
    indents = [0]                       # stack of indentation columns

    while True:                             # loop over lines in stream
        try:
            line = readline()
        except StopIteration:
            line = ''
        lnum = lnum + 1
        pos, line_len = 0, len(line)        # renamed from 'max' (builtin)

        if contstr:                         # continued string
            if not line:
                # Fixed: was py2-only 'raise TokenError, (...)' syntax.
                raise TokenError("EOF in multi-line string", strstart)
            endmatch = endprog.match(line)
            if endmatch:
                pos = end = endmatch.end(0)
                yield (STRING, contstr + line[:end],
                       strstart, (lnum, end), contline + line)
                contstr, needcont = '', 0
                contline = None
            elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
                # Single-quoted continuation line that doesn't end in a
                # backslash: the string is unterminated.
                yield (ERRORTOKEN, contstr + line,
                       strstart, (lnum, len(line)), contline)
                contstr = ''
                contline = None
                continue
            else:
                contstr = contstr + line
                contline = contline + line
                continue

        elif parenlev == 0 and not continued:  # new statement
            if not line: break
            column = 0
            while pos < line_len:              # measure leading whitespace
                if line[pos] == ' ': column = column + 1
                # Fixed: '//' -- '/' is true division in py3, giving a
                # float column and breaking the indent comparisons below.
                elif line[pos] == '\t': column = (column//tabsize + 1)*tabsize
                elif line[pos] == '\f': column = 0
                else: break
                pos = pos + 1
            if pos == line_len: break

            if line[pos] in '#\r\n':           # skip comments or blank lines
                if line[pos] == '#':
                    # Split a comment line into COMMENT + trailing NL.
                    comment_token = line[pos:].rstrip('\r\n')
                    nl_pos = pos + len(comment_token)
                    yield (COMMENT, comment_token,
                           (lnum, pos), (lnum, pos + len(comment_token)), line)
                    yield (NL, line[nl_pos:],
                           (lnum, nl_pos), (lnum, len(line)), line)
                else:
                    yield ((NL, COMMENT)[line[pos] == '#'], line[pos:],
                           (lnum, pos), (lnum, len(line)), line)
                continue

            if column > indents[-1]:           # count indents or dedents
                indents.append(column)
                yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
            while column < indents[-1]:
                if column not in indents:
                    raise IndentationError(
                        "unindent does not match any outer indentation level",
                        ("<tokenize>", lnum, pos, line))
                indents = indents[:-1]
                yield (DEDENT, '', (lnum, pos), (lnum, pos), line)

        else:                                  # continued statement
            if not line:
                # Fixed: was py2-only 'raise TokenError, (...)' syntax.
                raise TokenError("EOF in multi-line statement", (lnum, 0))
            continued = 0

        while pos < line_len:
            pseudomatch = pseudoprog.match(line, pos)
            if pseudomatch:                            # scan for tokens
                start, end = pseudomatch.span(1)
                spos, epos, pos = (lnum, start), (lnum, end), end
                token, initial = line[start:end], line[start]

                if (initial in numchars or             # ordinary number
                    (initial == '.' and token != '.' and token != '...')):
                    yield (NUMBER, token, spos, epos, line)
                elif initial in '\r\n':
                    # Inside brackets a newline is non-logical (NL).
                    yield (NL if parenlev > 0 else NEWLINE,
                           token, spos, epos, line)
                elif initial == '#':
                    assert not token.endswith("\n")
                    yield (COMMENT, token, spos, epos, line)
                elif token in triple_quoted:
                    endprog = endprogs[token]
                    endmatch = endprog.match(line, pos)
                    if endmatch:                       # all on one line
                        pos = endmatch.end(0)
                        token = line[start:pos]
                        yield (STRING, token, spos, (lnum, pos), line)
                    else:
                        strstart = (lnum, start)       # multiple lines
                        contstr = line[start:]
                        contline = line
                        break
                elif initial in single_quoted or \
                    token[:2] in single_quoted or \
                    token[:3] in single_quoted:
                    if token[-1] == '\n':              # continued string
                        strstart = (lnum, start)
                        # Bare prefix keys in endprogs are None, so this
                        # falls through to the quote character's pattern.
                        endprog = (endprogs[initial] or endprogs[token[1]] or
                                   endprogs[token[2]])
                        contstr, needcont = line[start:], 1
                        contline = line
                        break
                    else:                              # ordinary string
                        yield (STRING, token, spos, epos, line)
                elif initial in namechars:             # ordinary name
                    yield (NAME, token, spos, epos, line)
                elif initial == '\\':                  # continued stmt
                    continued = 1
                else:
                    if initial in '([{': parenlev = parenlev + 1
                    elif initial in ')]}': parenlev = parenlev - 1
                    yield (OP, token, spos, epos, line)
            else:
                yield (ERRORTOKEN, line[pos],
                       (lnum, pos), (lnum, pos+1), line)
                pos = pos + 1

    for indent in indents[1:]:                 # pop remaining indent levels
        yield (DEDENT, '', (lnum, 0), (lnum, 0), '')
    yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')
Guido van Rossumfc6f5331997-03-07 00:21:12 +0000387
if __name__ == '__main__':                     # testing
    import sys
    if len(sys.argv) > 1:
        # Tokenize the named file; close the handle deterministically
        # instead of leaking it until garbage collection.
        with open(sys.argv[1]) as fp:
            tokenize(fp.readline)
    else:
        tokenize(sys.stdin.readline)