import re
import sys
import unittest

sys.path.insert(0, '..')
from pycparser.c_lexer import CLexer


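# Helper functions for the tests below: iter(clex.token, None) calls
# clex.token() repeatedly until it returns None (the lexer's end-of-input
# marker), so token_list collects all remaining tokens in order and
# token_types reduces them to their type names.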
def token_list(clex):
    return list(iter(clex.token, None))


def token_types(clex):
    return [i.type for i in token_list(clex)]


class TestCLexerNoErrors(unittest.TestCase):
    """ Test lexing of strings that are not supposed to cause
        errors. The error_func passed to the lexer therefore fails
        the test as soon as it is called.
    """
    def error_func(self, msg, line, column):
        self.fail(msg)

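    # The lexer consults type_lookup_func to decide whether an identifier
    # names a typedef'd type; here anything starting with 'mytype' is
    # reported as a type, which is what test_id_typeid below relies on.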
    def type_lookup_func(self, typ):
        if typ.startswith('mytype'):
            return True
        else:
            return False

    def setUp(self):
        self.clex = CLexer(self.error_func, self.type_lookup_func)
        self.clex.build(optimize=False)

    def assertTokensTypes(self, str, types):
        self.clex.input(str)
        self.assertEqual(token_types(self.clex), types)

    def test_trivial_tokens(self):
        self.assertTokensTypes('1', ['INT_CONST_DEC'])
        self.assertTokensTypes('-', ['MINUS'])
        self.assertTokensTypes('volatile', ['VOLATILE'])
        self.assertTokensTypes('...', ['ELLIPSIS'])
        self.assertTokensTypes('++', ['PLUSPLUS'])
        self.assertTokensTypes('case int', ['CASE', 'INT'])
        self.assertTokensTypes('caseint', ['ID'])
        self.assertTokensTypes('$dollar cent$', ['ID', 'ID'])
        self.assertTokensTypes('i ^= 1;', ['ID', 'XOREQUAL', 'INT_CONST_DEC', 'SEMI'])

    def test_id_typeid(self):
        self.assertTokensTypes('myt', ['ID'])
        self.assertTokensTypes('mytype', ['TYPEID'])
        self.assertTokensTypes('mytype6 var', ['TYPEID', 'ID'])

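    # C integer constants: unsigned/long suffixes (u/U, l/L, ll/LL) may be
    # combined in either order and either case without changing the token
    # type of the constant itself.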
    def test_integer_constants(self):
        self.assertTokensTypes('12', ['INT_CONST_DEC'])
        self.assertTokensTypes('12u', ['INT_CONST_DEC'])
        self.assertTokensTypes('12l', ['INT_CONST_DEC'])
        self.assertTokensTypes('199872Ul', ['INT_CONST_DEC'])
        self.assertTokensTypes('199872lU', ['INT_CONST_DEC'])
        self.assertTokensTypes('199872LL', ['INT_CONST_DEC'])
        self.assertTokensTypes('199872ull', ['INT_CONST_DEC'])
        self.assertTokensTypes('199872llu', ['INT_CONST_DEC'])
        self.assertTokensTypes('1009843200000uLL', ['INT_CONST_DEC'])
        self.assertTokensTypes('1009843200000LLu', ['INT_CONST_DEC'])

        self.assertTokensTypes('077', ['INT_CONST_OCT'])
        self.assertTokensTypes('0123456L', ['INT_CONST_OCT'])

        self.assertTokensTypes('0xf7', ['INT_CONST_HEX'])
        self.assertTokensTypes('0x01202AAbbf7Ul', ['INT_CONST_HEX'])

        # no 0 before x, so ID catches it
        self.assertTokensTypes('xf7', ['ID'])

        # '-' is MINUS; the rest is a constant
        self.assertTokensTypes('-1', ['MINUS', 'INT_CONST_DEC'])

    def test_floating_constants(self):
        self.assertTokensTypes('1.5f', ['FLOAT_CONST'])
        self.assertTokensTypes('01.5', ['FLOAT_CONST'])
        self.assertTokensTypes('.15L', ['FLOAT_CONST'])
        self.assertTokensTypes('0.', ['FLOAT_CONST'])

        # but just a period is a period
        self.assertTokensTypes('.', ['PERIOD'])

        self.assertTokensTypes('3.3e-3', ['FLOAT_CONST'])
        self.assertTokensTypes('.7e25L', ['FLOAT_CONST'])
        self.assertTokensTypes('6.e+125f', ['FLOAT_CONST'])
        self.assertTokensTypes('666e666', ['FLOAT_CONST'])
        self.assertTokensTypes('00666e+3', ['FLOAT_CONST'])

        # but this is a hex integer + 3
        self.assertTokensTypes('0x0666e+3', ['INT_CONST_HEX', 'PLUS', 'INT_CONST_DEC'])

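    # C99 hexadecimal floating constants: hex digits followed by a mandatory
    # binary exponent introduced by 'p' or 'P'.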
    def test_hexadecimal_floating_constants(self):
        self.assertTokensTypes('0xDE.488641p0', ['HEX_FLOAT_CONST'])
        self.assertTokensTypes('0x.488641p0', ['HEX_FLOAT_CONST'])
        self.assertTokensTypes('0X12.P0', ['HEX_FLOAT_CONST'])

    def test_char_constants(self):
        self.assertTokensTypes(r"""'x'""", ['CHAR_CONST'])
        self.assertTokensTypes(r"""L'x'""", ['WCHAR_CONST'])
        self.assertTokensTypes(r"""'\t'""", ['CHAR_CONST'])
        self.assertTokensTypes(r"""'\''""", ['CHAR_CONST'])
        self.assertTokensTypes(r"""'\?'""", ['CHAR_CONST'])
        self.assertTokensTypes(r"""'\012'""", ['CHAR_CONST'])
        self.assertTokensTypes(r"""'\x2f'""", ['CHAR_CONST'])
        self.assertTokensTypes(r"""'\x2f12'""", ['CHAR_CONST'])
        self.assertTokensTypes(r"""L'\xaf'""", ['WCHAR_CONST'])

    def test_string_literal(self):
        self.assertTokensTypes('"a string"', ['STRING_LITERAL'])
        self.assertTokensTypes('L"ing"', ['WSTRING_LITERAL'])
        self.assertTokensTypes(
            '"i am a string too \t"',
            ['STRING_LITERAL'])
        self.assertTokensTypes(
            r'''"esc\ape \"\'\? \0234 chars \rule"''',
            ['STRING_LITERAL'])
        self.assertTokensTypes(
            r'''"hello 'joe' wanna give it a \"go\"?"''',
            ['STRING_LITERAL'])

    def test_mess(self):
        self.assertTokensTypes(
            r'[{}]()',
            ['LBRACKET',
                'LBRACE', 'RBRACE',
                'RBRACKET',
                'LPAREN', 'RPAREN'])

        self.assertTokensTypes(
            r'()||!C&~Z?J',
            ['LPAREN', 'RPAREN',
                'LOR',
                'LNOT', 'ID',
                'AND',
                'NOT', 'ID',
                'CONDOP', 'ID'])

        self.assertTokensTypes(
            r'+-*/%|||&&&^><>=<===!=',
            ['PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD',
                'LOR', 'OR',
                'LAND', 'AND',
                'XOR',
                'GT', 'LT', 'GE', 'LE', 'EQ', 'NE'])

        self.assertTokensTypes(
            r'++--->?.,;:',
            ['PLUSPLUS', 'MINUSMINUS',
                'ARROW', 'CONDOP',
                'PERIOD', 'COMMA', 'SEMI', 'COLON'])

    def test_exprs(self):
        self.assertTokensTypes(
            'bb-cc',
            ['ID', 'MINUS', 'ID'])

        self.assertTokensTypes(
            'foo & 0xFF',
            ['ID', 'AND', 'INT_CONST_HEX'])

        self.assertTokensTypes(
            '(2+k) * 62',
            ['LPAREN', 'INT_CONST_DEC', 'PLUS', 'ID',
                'RPAREN', 'TIMES', 'INT_CONST_DEC'])

        self.assertTokensTypes(
            'x | y >> z',
            ['ID', 'OR', 'ID', 'RSHIFT', 'ID'])

        self.assertTokensTypes(
            'x <<= z << 5',
            ['ID', 'LSHIFTEQUAL', 'ID', 'LSHIFT', 'INT_CONST_DEC'])

        self.assertTokensTypes(
            'x = y > 0 ? y : -6',
            ['ID', 'EQUALS',
                'ID', 'GT', 'INT_CONST_OCT',
                'CONDOP',
                'ID',
                'COLON',
                'MINUS', 'INT_CONST_DEC'])

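        # Maximal munch: the lexer greedily matches '++' before '+', so
        # 'a+++b' lexes as a++ + b rather than a + ++b.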
        self.assertTokensTypes(
            'a+++b',
            ['ID', 'PLUSPLUS', 'PLUS', 'ID'])

    def test_statements(self):
        self.assertTokensTypes(
            'for (int i = 0; i < n; ++i)',
            ['FOR', 'LPAREN',
                'INT', 'ID', 'EQUALS', 'INT_CONST_OCT', 'SEMI',
                'ID', 'LT', 'ID', 'SEMI',
                'PLUSPLUS', 'ID',
                'RPAREN'])

        self.assertTokensTypes(
            'self: goto self;',
            ['ID', 'COLON', 'GOTO', 'ID', 'SEMI'])

        self.assertTokensTypes(
            """ switch (typ)
                {
                    case TYPE_ID:
                        m = 5;
                        break;
                    default:
                        m = 8;
                }""",
            ['SWITCH', 'LPAREN', 'ID', 'RPAREN',
                'LBRACE',
                'CASE', 'ID', 'COLON',
                'ID', 'EQUALS', 'INT_CONST_DEC', 'SEMI',
                'BREAK', 'SEMI',
                'DEFAULT', 'COLON',
                'ID', 'EQUALS', 'INT_CONST_DEC', 'SEMI',
                'RBRACE'])

    def test_preprocessor_line(self):
        self.assertTokensTypes('#abracadabra', ['PPHASH', 'ID'])

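        # A '#line NUM "FILE"' directive should update the line number and
        # filename the lexer reports for subsequent tokens.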
        str = r"""
        546
        #line 66 "kwas\df.h"
        id 4
        dsf
        # 9
        armo
        #line 10 "..\~..\test.h"
        tok1
        #line 99999 "include/me.h"
        tok2
        """

        #~ self.clex.filename
        self.clex.input(str)
        self.clex.reset_lineno()

        t1 = self.clex.token()
        self.assertEqual(t1.type, 'INT_CONST_DEC')
        self.assertEqual(t1.lineno, 2)

        t2 = self.clex.token()
        self.assertEqual(t2.type, 'ID')
        self.assertEqual(t2.value, 'id')
        self.assertEqual(t2.lineno, 66)
        self.assertEqual(self.clex.filename, r'kwas\df.h')

        for i in range(3):
            t = self.clex.token()

        self.assertEqual(t.type, 'ID')
        self.assertEqual(t.value, 'armo')
        self.assertEqual(t.lineno, 9)
        self.assertEqual(self.clex.filename, r'kwas\df.h')

        t4 = self.clex.token()
        self.assertEqual(t4.type, 'ID')
        self.assertEqual(t4.value, 'tok1')
        self.assertEqual(t4.lineno, 10)
        self.assertEqual(self.clex.filename, r'..\~..\test.h')

        t5 = self.clex.token()
        self.assertEqual(t5.type, 'ID')
        self.assertEqual(t5.value, 'tok2')
        self.assertEqual(t5.lineno, 99999)
        self.assertEqual(self.clex.filename, r'include/me.h')

    def test_preprocessor_line_funny(self):
        str = r'''
        #line 10 "..\6\joe.h"
        10
        '''
        self.clex.input(str)
        self.clex.reset_lineno()

        t1 = self.clex.token()
        self.assertEqual(t1.type, 'INT_CONST_DEC')
        self.assertEqual(t1.lineno, 10)
        self.assertEqual(self.clex.filename, r'..\6\joe.h')

    def test_preprocessor_pragma(self):
        str = r'''
        42
        #pragma helo me
        #pragma once
        # pragma omp parallel private(th_id)
        #pragma {pack: 2, smack: 3}
        #pragma <includeme.h> "nowit.h"
        #pragma "string"
        #pragma id 124124 and numbers 0235495
        59
        '''

        # Check that pragmas are ignored but the line number advances
        self.clex.input(str)
        self.clex.reset_lineno()

        t1 = self.clex.token()
        self.assertEqual(t1.type, 'INT_CONST_DEC')
        t2 = self.clex.token()
        self.assertEqual(t2.type, 'INT_CONST_DEC')
        self.assertEqual(t2.lineno, 10)


# Keeps all the errors the lexer spits in one place, to allow
# easier modification if the error syntax changes.
#
ERR_ILLEGAL_CHAR = 'Illegal character'
ERR_OCTAL = 'Invalid octal constant'
ERR_UNMATCHED_QUOTE = 'Unmatched \''
ERR_INVALID_CCONST = 'Invalid char constant'
ERR_STRING_ESCAPE = 'String contains invalid escape'

ERR_FILENAME_BEFORE_LINE = 'filename before line'
ERR_LINENUM_MISSING = 'line number missing'
ERR_INVALID_LINE_DIRECTIVE = 'invalid #line directive'


326class TestCLexerErrors(unittest.TestCase):
327 """ Test lexing of erroneous strings.
328 Works by passing an error functions that saves the error
329 in an attribute for later perusal.
Eli Bendersky86f2eee2013-01-18 06:04:01 -0800330 """
    def error_func(self, msg, line, column):
        self.error = msg

    def type_lookup_func(self, typ):
        return False

    def setUp(self):
        self.clex = CLexer(self.error_func, self.type_lookup_func)
        self.clex.build(optimize=False)
        self.error = ""

    def assertLexerError(self, str, error_like):
        # feed the string to the lexer
        self.clex.input(str)

        # Pulls all tokens from the string. Errors will
        # be written into self.error by the error_func
        # callback
        #
        token_types(self.clex)

        # compare the error to the expected
        self.assertTrue(re.search(error_like, self.error),
            "\nExpected error matching: %s\nGot: %s" %
                (error_like, self.error))

        # clear last error, for the sake of subsequent invocations
        self.error = ""

    def test_trivial_tokens(self):
        self.assertLexerError('@', ERR_ILLEGAL_CHAR)
        self.assertLexerError('`', ERR_ILLEGAL_CHAR)
        self.assertLexerError('\\', ERR_ILLEGAL_CHAR)

    def test_integer_constants(self):
        self.assertLexerError('029', ERR_OCTAL)
        self.assertLexerError('012345678', ERR_OCTAL)

    def test_char_constants(self):
        self.assertLexerError("'", ERR_UNMATCHED_QUOTE)
        self.assertLexerError("'b\n", ERR_UNMATCHED_QUOTE)

        self.assertLexerError("'jx'", ERR_INVALID_CCONST)
        self.assertLexerError(r"'\*'", ERR_INVALID_CCONST)

    def test_string_literals(self):
        self.assertLexerError(r'"jx\9"', ERR_STRING_ESCAPE)
        self.assertLexerError(r'"hekllo\* on ix"', ERR_STRING_ESCAPE)
        self.assertLexerError(r'L"hekllo\* on ix"', ERR_STRING_ESCAPE)

    def test_preprocessor(self):
        self.assertLexerError('#line "ka"', ERR_FILENAME_BEFORE_LINE)
        self.assertLexerError('#line df', ERR_INVALID_LINE_DIRECTIVE)
        self.assertLexerError('#line \n', ERR_LINENUM_MISSING)


if __name__ == '__main__':
    unittest.main()