/* Peephole optimizations for bytecode compiler. */

#include "Python.h"

#include "Python-ast.h"
#include "node.h"
#include "pyarena.h"
#include "ast.h"
#include "code.h"
#include "compile.h"
#include "symtable.h"
#include "opcode.h"

#define GETARG(arr, i) ((int)((arr[i+2]<<8) + arr[i+1]))
#define UNCONDITIONAL_JUMP(op)	(op==JUMP_ABSOLUTE || op==JUMP_FORWARD)
#define ABSOLUTE_JUMP(op) (op==JUMP_ABSOLUTE || op==CONTINUE_LOOP)
#define GETJUMPTGT(arr, i) (GETARG(arr,i) + (ABSOLUTE_JUMP(arr[i]) ? 0 : i+3))
#define SETARG(arr, i, val) arr[i+2] = val>>8; arr[i+1] = val & 255
#define CODESIZE(op)  (HAS_ARG(op) ? 3 : 1)
#define ISBASICBLOCK(blocks, start, bytes) \
	(blocks[start]==blocks[start+bytes-1])
23/* Replace LOAD_CONST c1. LOAD_CONST c2 ... LOAD_CONST cn BUILD_TUPLE n
24 with LOAD_CONST (c1, c2, ... cn).
25 The consts table must still be in list form so that the
26 new constant (c1, c2, ... cn) can be appended.
27 Called with codestr pointing to the first LOAD_CONST.
28 Bails out with no change if one or more of the LOAD_CONSTs is missing.
29 Also works for BUILD_LIST when followed by an "in" or "not in" test.
30*/
31static int
32tuple_of_constants(unsigned char *codestr, int n, PyObject *consts)
33{
34 PyObject *newconst, *constant;
35 Py_ssize_t i, arg, len_consts;
36
37 /* Pre-conditions */
38 assert(PyList_CheckExact(consts));
39 assert(codestr[n*3] == BUILD_TUPLE || codestr[n*3] == BUILD_LIST);
40 assert(GETARG(codestr, (n*3)) == n);
41 for (i=0 ; i<n ; i++)
42 assert(codestr[i*3] == LOAD_CONST);
43
44 /* Buildup new tuple of constants */
45 newconst = PyTuple_New(n);
46 if (newconst == NULL)
47 return 0;
48 len_consts = PyList_GET_SIZE(consts);
49 for (i=0 ; i<n ; i++) {
50 arg = GETARG(codestr, (i*3));
51 assert(arg < len_consts);
52 constant = PyList_GET_ITEM(consts, arg);
53 Py_INCREF(constant);
54 PyTuple_SET_ITEM(newconst, i, constant);
55 }
56
57 /* Append folded constant onto consts */
58 if (PyList_Append(consts, newconst)) {
59 Py_DECREF(newconst);
60 return 0;
61 }
62 Py_DECREF(newconst);
63
64 /* Write NOPs over old LOAD_CONSTS and
65 add a new LOAD_CONST newconst on top of the BUILD_TUPLE n */
66 memset(codestr, NOP, n*3);
67 codestr[n*3] = LOAD_CONST;
68 SETARG(codestr, (n*3), len_consts);
69 return 1;
70}
71
72/* Replace LOAD_CONST c1. LOAD_CONST c2 BINOP
73 with LOAD_CONST binop(c1,c2)
74 The consts table must still be in list form so that the
75 new constant can be appended.
76 Called with codestr pointing to the first LOAD_CONST.
77 Abandons the transformation if the folding fails (i.e. 1+'a').
78 If the new constant is a sequence, only folds when the size
79 is below a threshold value. That keeps pyc files from
80 becoming large in the presence of code like: (None,)*1000.
81*/
82static int
83fold_binops_on_constants(unsigned char *codestr, PyObject *consts)
84{
85 PyObject *newconst, *v, *w;
86 Py_ssize_t len_consts, size;
87 int opcode;
88
89 /* Pre-conditions */
90 assert(PyList_CheckExact(consts));
91 assert(codestr[0] == LOAD_CONST);
92 assert(codestr[3] == LOAD_CONST);
93
94 /* Create new constant */
95 v = PyList_GET_ITEM(consts, GETARG(codestr, 0));
96 w = PyList_GET_ITEM(consts, GETARG(codestr, 3));
97 opcode = codestr[6];
98 switch (opcode) {
99 case BINARY_POWER:
100 newconst = PyNumber_Power(v, w, Py_None);
101 break;
102 case BINARY_MULTIPLY:
103 newconst = PyNumber_Multiply(v, w);
104 break;
105 case BINARY_TRUE_DIVIDE:
106 newconst = PyNumber_TrueDivide(v, w);
107 break;
108 case BINARY_FLOOR_DIVIDE:
109 newconst = PyNumber_FloorDivide(v, w);
110 break;
111 case BINARY_MODULO:
112 newconst = PyNumber_Remainder(v, w);
113 break;
114 case BINARY_ADD:
115 newconst = PyNumber_Add(v, w);
116 break;
117 case BINARY_SUBTRACT:
118 newconst = PyNumber_Subtract(v, w);
119 break;
120 case BINARY_SUBSCR:
121 newconst = PyObject_GetItem(v, w);
122 break;
123 case BINARY_LSHIFT:
124 newconst = PyNumber_Lshift(v, w);
125 break;
126 case BINARY_RSHIFT:
127 newconst = PyNumber_Rshift(v, w);
128 break;
129 case BINARY_AND:
130 newconst = PyNumber_And(v, w);
131 break;
132 case BINARY_XOR:
133 newconst = PyNumber_Xor(v, w);
134 break;
135 case BINARY_OR:
136 newconst = PyNumber_Or(v, w);
137 break;
138 default:
139 /* Called with an unknown opcode */
140 PyErr_Format(PyExc_SystemError,
141 "unexpected binary operation %d on a constant",
142 opcode);
143 return 0;
144 }
145 if (newconst == NULL) {
146 PyErr_Clear();
147 return 0;
148 }
149 size = PyObject_Size(newconst);
150 if (size == -1)
151 PyErr_Clear();
152 else if (size > 20) {
153 Py_DECREF(newconst);
154 return 0;
155 }
156
157 /* Append folded constant into consts table */
158 len_consts = PyList_GET_SIZE(consts);
159 if (PyList_Append(consts, newconst)) {
160 Py_DECREF(newconst);
161 return 0;
162 }
163 Py_DECREF(newconst);
164
165 /* Write NOP NOP NOP NOP LOAD_CONST newconst */
166 memset(codestr, NOP, 4);
167 codestr[4] = LOAD_CONST;
168 SETARG(codestr, 4, len_consts);
169 return 1;
170}
171
172static int
173fold_unaryops_on_constants(unsigned char *codestr, PyObject *consts)
174{
175 PyObject *newconst=NULL, *v;
176 Py_ssize_t len_consts;
177 int opcode;
178
179 /* Pre-conditions */
180 assert(PyList_CheckExact(consts));
181 assert(codestr[0] == LOAD_CONST);
182
183 /* Create new constant */
184 v = PyList_GET_ITEM(consts, GETARG(codestr, 0));
185 opcode = codestr[3];
186 switch (opcode) {
187 case UNARY_NEGATIVE:
188 /* Preserve the sign of -0.0 */
189 if (PyObject_IsTrue(v) == 1)
190 newconst = PyNumber_Negative(v);
191 break;
Thomas Wouters00ee7ba2006-08-21 19:07:27 +0000192 case UNARY_INVERT:
193 newconst = PyNumber_Invert(v);
194 break;
195 default:
196 /* Called with an unknown opcode */
197 PyErr_Format(PyExc_SystemError,
198 "unexpected unary operation %d on a constant",
199 opcode);
200 return 0;
201 }
202 if (newconst == NULL) {
203 PyErr_Clear();
204 return 0;
205 }
206
207 /* Append folded constant into consts table */
208 len_consts = PyList_GET_SIZE(consts);
209 if (PyList_Append(consts, newconst)) {
210 Py_DECREF(newconst);
211 return 0;
212 }
213 Py_DECREF(newconst);
214
215 /* Write NOP LOAD_CONST newconst */
216 codestr[0] = NOP;
217 codestr[1] = LOAD_CONST;
218 SETARG(codestr, 1, len_consts);
219 return 1;
220}
221
222static unsigned int *
223markblocks(unsigned char *code, int len)
224{
225 unsigned int *blocks = (unsigned int *)PyMem_Malloc(len*sizeof(int));
226 int i,j, opcode, blockcnt = 0;
227
228 if (blocks == NULL) {
229 PyErr_NoMemory();
230 return NULL;
231 }
232 memset(blocks, 0, len*sizeof(int));
233
234 /* Mark labels in the first pass */
235 for (i=0 ; i<len ; i+=CODESIZE(opcode)) {
236 opcode = code[i];
237 switch (opcode) {
238 case FOR_ITER:
239 case JUMP_FORWARD:
240 case JUMP_IF_FALSE:
241 case JUMP_IF_TRUE:
242 case JUMP_ABSOLUTE:
243 case CONTINUE_LOOP:
244 case SETUP_LOOP:
245 case SETUP_EXCEPT:
246 case SETUP_FINALLY:
247 j = GETJUMPTGT(code, i);
248 blocks[j] = 1;
249 break;
250 }
251 }
252 /* Build block numbers in the second pass */
253 for (i=0 ; i<len ; i++) {
254 blockcnt += blocks[i]; /* increment blockcnt over labels */
255 blocks[i] = blockcnt;
256 }
257 return blocks;
258}
259
Guido van Rossumcd16bf62007-06-13 18:07:49 +0000260/* Helper to replace LOAD_NAME None/True/False with LOAD_CONST
261 Returns: 0 if no change, 1 if change, -1 if error */
262static int
263load_global(unsigned char *codestr, Py_ssize_t i, char *name, PyObject *consts)
264{
265 Py_ssize_t j;
266 PyObject *obj;
267 if (name == NULL)
268 return 0;
269 if (strcmp(name, "None") == 0)
270 obj = Py_None;
271 else if (strcmp(name, "True") == 0)
272 obj = Py_True;
273 else if (strcmp(name, "False") == 0)
274 obj = Py_False;
275 else
276 return 0;
277 for (j = 0; j < PyList_GET_SIZE(consts); j++) {
278 if (PyList_GET_ITEM(consts, j) == obj)
279 break;
280 }
281 if (j == PyList_GET_SIZE(consts)) {
282 if (PyList_Append(consts, obj) < 0)
283 return -1;
284 }
285 assert(PyList_GET_ITEM(consts, j) == obj);
286 codestr[i] = LOAD_CONST;
287 SETARG(codestr, i, j);
288 return 1;
289}
290
/* Perform basic peephole optimizations to components of a code object.
   The consts object should still be in list form to allow new constants
   to be appended.

   To keep the optimizer simple, it bails out (does nothing) for code that
   has a length over 32,700, and does not calculate extended arguments.
   That allows us to avoid overflow and sign issues.  Likewise, it bails when
   the lineno table has complex encoding for gaps >= 255.  EXTENDED_ARG can
   appear before MAKE_FUNCTION; in this case both opcodes are skipped.
   EXTENDED_ARG preceding any other opcode causes the optimizer to bail.

   Optimizations are restricted to simple transformations occurring within a
   single basic block.  All transformations keep the code size the same or
   smaller.  For those that reduce size, the gaps are initially filled with
   NOPs.  Later those NOPs are removed and the jump addresses retargeted in
   a single pass.  Line numbering is adjusted accordingly.

   On any bail-out path the inputs are left untouched and a new reference
   to the original code string is returned. */

PyObject *
PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names,
                PyObject *lineno_obj)
{
	Py_ssize_t i, j, codelen;
	int nops, h, adj;
	int tgt, tgttgt, opcode;
	unsigned char *codestr = NULL;
	unsigned char *lineno;
	int *addrmap = NULL;		/* old offset -> offset after NOP removal */
	int new_line, cum_orig_line, last_line, tabsiz;
	int cumlc=0, lastlc=0;	/* Count runs of consecutive LOAD_CONSTs */
	unsigned int *blocks = NULL;
	char *name;

	/* Bail out if an exception is set */
	if (PyErr_Occurred())
		goto exitUnchanged;

	/* Bypass optimization when the lineno table is too complex:
	   a 255 byte in the table marks a gap the fixup pass below
	   cannot re-encode. */
	assert(PyString_Check(lineno_obj));
	lineno = (unsigned char*)PyString_AS_STRING(lineno_obj);
	tabsiz = PyString_GET_SIZE(lineno_obj);
	if (memchr(lineno, 255, tabsiz) != NULL)
		goto exitUnchanged;

	/* Avoid situations where jump retargeting could overflow */
	assert(PyString_Check(code));
	codelen = PyString_GET_SIZE(code);
	if (codelen > 32700)
		goto exitUnchanged;

	/* Make a modifiable copy of the code string */
	codestr = (unsigned char *)PyMem_Malloc(codelen);
	if (codestr == NULL)
		goto exitUnchanged;
	codestr = (unsigned char *)memcpy(codestr,
					  PyString_AS_STRING(code), codelen);

	/* Verify that RETURN_VALUE terminates the codestring.	This allows
	   the various transformation patterns to look ahead several
	   instructions without additional checks to make sure they are not
	   looking beyond the end of the code string.
	*/
	if (codestr[codelen-1] != RETURN_VALUE)
		goto exitUnchanged;

	/* Mapping to new jump targets after NOPs are removed */
	addrmap = (int *)PyMem_Malloc(codelen * sizeof(int));
	if (addrmap == NULL)
		goto exitUnchanged;

	blocks = markblocks(codestr, codelen);
	if (blocks == NULL)
		goto exitUnchanged;
	assert(PyList_Check(consts));

	/* Main transformation pass: walk the instructions once, applying
	   local rewrites in place (shrinking rewrites leave NOPs). */
	for (i=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
		opcode = codestr[i];

		/* lastlc is the LOAD_CONST run length ending just before
		   this instruction; cumlc extends it if we emit one. */
		lastlc = cumlc;
		cumlc = 0;

		switch (opcode) {

			/* Replace UNARY_NOT JUMP_IF_FALSE POP_TOP with 
			   with JUMP_IF_TRUE POP_TOP */
			case UNARY_NOT:
				if (codestr[i+1] != JUMP_IF_FALSE  ||
				    codestr[i+4] != POP_TOP  ||
				    !ISBASICBLOCK(blocks,i,5))
					continue;
				tgt = GETJUMPTGT(codestr, (i+1));
				if (codestr[tgt] != POP_TOP)
					continue;
				/* +1: the jump now starts one byte earlier
				   but must land on the same target. */
				j = GETARG(codestr, i+1) + 1;
				codestr[i] = JUMP_IF_TRUE;
				SETARG(codestr, i, j);
				codestr[i+3] = POP_TOP;
				codestr[i+4] = NOP;
				break;

				/* not a is b -->  a is not b
				   not a in b -->  a not in b
				   not a is not b -->  a is b
				   not a not in b -->  a in b
				*/
			case COMPARE_OP:
				j = GETARG(codestr, i);
				/* 6..9 are the is/is not/in/not in compare
				   args; XOR with 1 flips to the negation. */
				if (j < 6  ||  j > 9  ||
				    codestr[i+3] != UNARY_NOT  ||
				    !ISBASICBLOCK(blocks,i,4))
					continue;
				SETARG(codestr, i, (j^1));
				codestr[i+3] = NOP;
				break;

				/* Replace LOAD_GLOBAL/LOAD_NAME None/True/False
				   with LOAD_CONST None/True/False */
			case LOAD_NAME:
			case LOAD_GLOBAL:
				j = GETARG(codestr, i);
				name = PyString_AsString(PyTuple_GET_ITEM(names, j));
				h = load_global(codestr, i, name, consts);
				if (h < 0)
					goto exitUnchanged;
				else if (h == 0)
					continue;
				/* Now a LOAD_CONST: extend the run. */
				cumlc = lastlc + 1;
				break;

				/* Skip over LOAD_CONST trueconst
				   JUMP_IF_FALSE xx  POP_TOP */
			case LOAD_CONST:
				cumlc = lastlc + 1;
				j = GETARG(codestr, i);
				if (codestr[i+3] != JUMP_IF_FALSE  ||
				    codestr[i+6] != POP_TOP  ||
				    !ISBASICBLOCK(blocks,i,7)  ||
				    !PyObject_IsTrue(PyList_GET_ITEM(consts, j)))
					continue;
				memset(codestr+i, NOP, 7);
				cumlc = 0;
				break;

				/* Try to fold tuples of constants (includes a case for lists
				   which are only used for "in" and "not in" tests).
				   Skip over BUILD_SEQN 1 UNPACK_SEQN 1.
				   Replace BUILD_SEQN 2 UNPACK_SEQN 2 with ROT2.
				   Replace BUILD_SEQN 3 UNPACK_SEQN 3 with ROT3 ROT2. */
			case BUILD_TUPLE:
			case BUILD_LIST:
				j = GETARG(codestr, i);
				/* h: offset of the first of the j LOAD_CONSTs */
				h = i - 3 * j;
				if (h >= 0  &&
				    j <= lastlc	 &&
				    ((opcode == BUILD_TUPLE && 
				      ISBASICBLOCK(blocks, h, 3*(j+1))) ||
				     (opcode == BUILD_LIST && 
				      codestr[i+3]==COMPARE_OP && 
				      ISBASICBLOCK(blocks, h, 3*(j+2)) &&
				      (GETARG(codestr,i+3)==6 ||
				       GETARG(codestr,i+3)==7))) &&
				    tuple_of_constants(&codestr[h], j, consts)) {
					assert(codestr[i] == LOAD_CONST);
					cumlc = 1;
					break;
				}
				if (codestr[i+3] != UNPACK_SEQUENCE  ||
				    !ISBASICBLOCK(blocks,i,6) ||
				    j != GETARG(codestr, i+3))
					continue;
				if (j == 1) {
					memset(codestr+i, NOP, 6);
				} else if (j == 2) {
					codestr[i] = ROT_TWO;
					memset(codestr+i+1, NOP, 5);
				} else if (j == 3) {
					codestr[i] = ROT_THREE;
					codestr[i+1] = ROT_TWO;
					memset(codestr+i+2, NOP, 4);
				}
				break;

				/* Fold binary ops on constants.
				   LOAD_CONST c1 LOAD_CONST c2 BINOP -->  LOAD_CONST binop(c1,c2) */
			case BINARY_POWER:
			case BINARY_MULTIPLY:
			case BINARY_TRUE_DIVIDE:
			case BINARY_FLOOR_DIVIDE:
			case BINARY_MODULO:
			case BINARY_ADD:
			case BINARY_SUBTRACT:
			case BINARY_SUBSCR:
			case BINARY_LSHIFT:
			case BINARY_RSHIFT:
			case BINARY_AND:
			case BINARY_XOR:
			case BINARY_OR:
				if (lastlc >= 2	 &&
				    ISBASICBLOCK(blocks, i-6, 7)  &&
				    fold_binops_on_constants(&codestr[i-6], consts)) {
					/* Rewind to the new LOAD_CONST so the
					   scan resumes right after it. */
					i -= 2;
					assert(codestr[i] == LOAD_CONST);
					cumlc = 1;
				}
				break;

				/* Fold unary ops on constants.
				   LOAD_CONST c1  UNARY_OP -->	LOAD_CONST unary_op(c) */
			case UNARY_NEGATIVE:
			case UNARY_INVERT:
				if (lastlc >= 1	 &&
				    ISBASICBLOCK(blocks, i-3, 4)  &&
				    fold_unaryops_on_constants(&codestr[i-3], consts))	{
					i -= 2;
					assert(codestr[i] == LOAD_CONST);
					cumlc = 1;
				}
				break;

				/* Simplify conditional jump to conditional jump where the
				   result of the first test implies the success of a similar
				   test or the failure of the opposite test.
				   Arises in code like:
				   "if a and b:"
				   "if a or b:"
				   "a and b or c"
				   "(a and b) and c"
				   x:JUMP_IF_FALSE y   y:JUMP_IF_FALSE z  -->  x:JUMP_IF_FALSE z
				   x:JUMP_IF_FALSE y   y:JUMP_IF_TRUE z	 -->  x:JUMP_IF_FALSE y+3
				   where y+3 is the instruction following the second test.
				*/
			case JUMP_IF_FALSE:
			case JUMP_IF_TRUE:
				tgt = GETJUMPTGT(codestr, i);
				j = codestr[tgt];
				if (j == JUMP_IF_FALSE	||  j == JUMP_IF_TRUE) {
					if (j == opcode) {
						/* Same test: jump straight to the
						   second jump's target. */
						tgttgt = GETJUMPTGT(codestr, tgt) - i - 3;
						SETARG(codestr, i, tgttgt);
					} else {
						/* Opposite test: jump to the
						   instruction after it. */
						tgt -= i;
						SETARG(codestr, i, tgt);
					}
					break;
				}
				/* Intentional fallthrough */

				/* Replace jumps to unconditional jumps */
			case FOR_ITER:
			case JUMP_FORWARD:
			case JUMP_ABSOLUTE:
			case CONTINUE_LOOP:
			case SETUP_LOOP:
			case SETUP_EXCEPT:
			case SETUP_FINALLY:
				tgt = GETJUMPTGT(codestr, i);
				/* Replace JUMP_* to a RETURN into just a RETURN */
				if (UNCONDITIONAL_JUMP(opcode) &&
				    codestr[tgt] == RETURN_VALUE) {
					codestr[i] = RETURN_VALUE;
					memset(codestr+i+1, NOP, 2);
					continue;
				}
				if (!UNCONDITIONAL_JUMP(codestr[tgt]))
					continue;
				tgttgt = GETJUMPTGT(codestr, tgt);
				if (opcode == JUMP_FORWARD) /* JMP_ABS can go backwards */
					opcode = JUMP_ABSOLUTE;
				if (!ABSOLUTE_JUMP(opcode))
					tgttgt -= i + 3;     /* Calc relative jump addr */
				if (tgttgt < 0)		  /* No backward relative jumps */
					continue;
				codestr[i] = opcode;
				SETARG(codestr, i, tgttgt);
				break;

			case EXTENDED_ARG:
				/* Only tolerated before MAKE_FUNCTION; any
				   other use would make GETARG wrong, so bail. */
				if (codestr[i+3] != MAKE_FUNCTION)
					goto exitUnchanged;
				/* don't visit MAKE_FUNCTION as GETARG will be wrong */
				i += 3;
				break;

				/* Replace RETURN LOAD_CONST None RETURN with just RETURN */
				/* Remove unreachable JUMPs after RETURN */
			case RETURN_VALUE:
				if (i+4 >= codelen)
					continue;
				if (codestr[i+4] == RETURN_VALUE &&
				    ISBASICBLOCK(blocks,i,5))
					memset(codestr+i+1, NOP, 4);
				else if (UNCONDITIONAL_JUMP(codestr[i+1]) &&
					 ISBASICBLOCK(blocks,i,4))
					memset(codestr+i+1, NOP, 3);
				break;
		}
	}

	/* Fixup linenotab: build addrmap (old offset -> new offset once
	   NOPs are squeezed out) and re-encode the address deltas. */
	for (i=0, nops=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
		addrmap[i] = i - nops;
		if (codestr[i] == NOP)
			nops++;
	}
	cum_orig_line = 0;
	last_line = 0;
	for (i=0 ; i < tabsiz ; i+=2) {
		cum_orig_line += lineno[i];
		new_line = addrmap[cum_orig_line];
		/* Gaps >= 255 were excluded up front, so this holds. */
		assert (new_line - last_line < 255);
		lineno[i] =((unsigned char)(new_line - last_line));
		last_line = new_line;
	}

	/* Remove NOPs and fixup jump targets: compact in place with read
	   cursor i and write cursor h, retargeting jumps via addrmap. */
	for (i=0, h=0 ; i<codelen ; ) {
		opcode = codestr[i];
		switch (opcode) {
			case NOP:
				i++;
				continue;

			case JUMP_ABSOLUTE:
			case CONTINUE_LOOP:
				j = addrmap[GETARG(codestr, i)];
				SETARG(codestr, i, j);
				break;

			case FOR_ITER:
			case JUMP_FORWARD:
			case JUMP_IF_FALSE:
			case JUMP_IF_TRUE:
			case SETUP_LOOP:
			case SETUP_EXCEPT:
			case SETUP_FINALLY:
				j = addrmap[GETARG(codestr, i) + i + 3] - addrmap[i] - 3;
				SETARG(codestr, i, j);
				break;
		}
		adj = CODESIZE(opcode);
		while (adj--)
			codestr[h++] = codestr[i++];
	}
	assert(h + nops == codelen);

	code = PyString_FromStringAndSize((char *)codestr, h);
	PyMem_Free(addrmap);
	PyMem_Free(codestr);
	PyMem_Free(blocks);
	return code;

 exitUnchanged:
	/* Release whatever was allocated and hand back a new reference
	   to the unmodified code string. */
	if (blocks != NULL)
		PyMem_Free(blocks);
	if (addrmap != NULL)
		PyMem_Free(addrmap);
	if (codestr != NULL)
		PyMem_Free(codestr);
	Py_INCREF(code);
	return code;
}