/* Peephole optimizations for bytecode compiler. */

#include "Python.h"

#include "Python-ast.h"
#include "node.h"
#include "pyarena.h"
#include "ast.h"
#include "code.h"
#include "compile.h"
#include "symtable.h"
#include "opcode.h"

#define GETARG(arr, i) ((int)((arr[i+2]<<8) + arr[i+1]))
#define UNCONDITIONAL_JUMP(op) (op==JUMP_ABSOLUTE || op==JUMP_FORWARD)
#define CONDITIONAL_JUMP(op) (op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \
    || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP)
#define ABSOLUTE_JUMP(op) (op==JUMP_ABSOLUTE || op==CONTINUE_LOOP \
    || op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \
    || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP)
#define JUMPS_ON_TRUE(op) (op==POP_JUMP_IF_TRUE || op==JUMP_IF_TRUE_OR_POP)
#define GETJUMPTGT(arr, i) (GETARG(arr,i) + (ABSOLUTE_JUMP(arr[i]) ? 0 : i+3))
#define SETARG(arr, i, val) arr[i+2] = val>>8; arr[i+1] = val & 255
#define CODESIZE(op) (HAS_ARG(op) ? 3 : 1)
#define ISBASICBLOCK(blocks, start, bytes) \
    (blocks[start]==blocks[start+bytes-1])

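/* Note on the layout these macros assume: in CPython 2.x bytecode an
   instruction is one opcode byte, followed by a two-byte little-endian
   argument when HAS_ARG(op) is true.  For example, the byte sequence
   0x64 0x05 0x00 decodes as LOAD_CONST with GETARG() == 5, and
   SETARG(arr, i, 5) writes the bytes 0x05 0x00 after the opcode at i.
   GETJUMPTGT() always yields an absolute code offset: absolute jumps
   store the target directly, relative jumps store an offset from the
   next instruction (i+3). */
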
/* Replace LOAD_CONST c1, LOAD_CONST c2 ... LOAD_CONST cn, BUILD_TUPLE n
   with LOAD_CONST (c1, c2, ... cn).
   The consts table must still be in list form so that the
   new constant (c1, c2, ... cn) can be appended.
   Called with codestr pointing to the first LOAD_CONST.
   Bails out with no change if one or more of the LOAD_CONSTs is missing.
   Also works for BUILD_LIST when followed by an "in" or "not in" test.
*/
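/* Illustrative example: the expression (1, 2, 3) compiles to
       LOAD_CONST 1  LOAD_CONST 2  LOAD_CONST 3  BUILD_TUPLE 3
   and this routine rewrites it in place as
       NOP NOP NOP  NOP NOP NOP  NOP NOP NOP  LOAD_CONST (1, 2, 3)
   with the folded tuple appended to the consts list. */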
static int
tuple_of_constants(unsigned char *codestr, Py_ssize_t n, PyObject *consts)
{
    PyObject *newconst, *constant;
    Py_ssize_t i, arg, len_consts;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[n*3] == BUILD_TUPLE || codestr[n*3] == BUILD_LIST);
    assert(GETARG(codestr, (n*3)) == n);
    for (i=0 ; i<n ; i++)
        assert(codestr[i*3] == LOAD_CONST);

    /* Buildup new tuple of constants */
    newconst = PyTuple_New(n);
    if (newconst == NULL)
        return 0;
    len_consts = PyList_GET_SIZE(consts);
    for (i=0 ; i<n ; i++) {
        arg = GETARG(codestr, (i*3));
        assert(arg < len_consts);
        constant = PyList_GET_ITEM(consts, arg);
        Py_INCREF(constant);
        PyTuple_SET_ITEM(newconst, i, constant);
    }

    /* Append folded constant onto consts */
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOPs over old LOAD_CONSTS and
       add a new LOAD_CONST newconst on top of the BUILD_TUPLE n */
    memset(codestr, NOP, n*3);
    codestr[n*3] = LOAD_CONST;
    SETARG(codestr, (n*3), len_consts);
    return 1;
}

/* Replace LOAD_CONST c1, LOAD_CONST c2, BINOP
   with LOAD_CONST binop(c1,c2).
   The consts table must still be in list form so that the
   new constant can be appended.
   Called with codestr pointing to the first LOAD_CONST.
   Abandons the transformation if the folding fails (e.g. 1+'a').
   If the new constant is a sequence, only folds when the size
   is below a threshold value.  That keeps pyc files from
   becoming large in the presence of code like: (None,)*1000.
*/
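/* Illustrative example: for "2 * 3" the compiler emits
       LOAD_CONST 2  LOAD_CONST 3  BINARY_MULTIPLY
   (seven bytes), which this routine rewrites as
       NOP NOP NOP NOP  LOAD_CONST 6
   after appending 6 to the consts list. */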
static int
fold_binops_on_constants(unsigned char *codestr, PyObject *consts)
{
    PyObject *newconst, *v, *w;
    Py_ssize_t len_consts, size;
    int opcode;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[0] == LOAD_CONST);
    assert(codestr[3] == LOAD_CONST);

    /* Create new constant */
    v = PyList_GET_ITEM(consts, GETARG(codestr, 0));
    w = PyList_GET_ITEM(consts, GETARG(codestr, 3));
    opcode = codestr[6];
    switch (opcode) {
        case BINARY_POWER:
            newconst = PyNumber_Power(v, w, Py_None);
            break;
        case BINARY_MULTIPLY:
            newconst = PyNumber_Multiply(v, w);
            break;
        case BINARY_DIVIDE:
            /* Cannot fold this operation statically since
               the result can depend on the run-time presence
               of the -Qnew flag */
            return 0;
        case BINARY_TRUE_DIVIDE:
            newconst = PyNumber_TrueDivide(v, w);
            break;
        case BINARY_FLOOR_DIVIDE:
            newconst = PyNumber_FloorDivide(v, w);
            break;
        case BINARY_MODULO:
            newconst = PyNumber_Remainder(v, w);
            break;
        case BINARY_ADD:
            newconst = PyNumber_Add(v, w);
            break;
        case BINARY_SUBTRACT:
            newconst = PyNumber_Subtract(v, w);
            break;
        case BINARY_SUBSCR:
            /* #5057: if v is unicode, there might be differences between
               wide and narrow builds in cases like '\U00012345'[0] or
               '\U00012345abcdef'[3], so it's better to skip the optimization
               in order to produce compatible pycs.
            */
            if (PyUnicode_Check(v))
                return 0;
            newconst = PyObject_GetItem(v, w);
            break;
        case BINARY_LSHIFT:
            newconst = PyNumber_Lshift(v, w);
            break;
        case BINARY_RSHIFT:
            newconst = PyNumber_Rshift(v, w);
            break;
        case BINARY_AND:
            newconst = PyNumber_And(v, w);
            break;
        case BINARY_XOR:
            newconst = PyNumber_Xor(v, w);
            break;
        case BINARY_OR:
            newconst = PyNumber_Or(v, w);
            break;
        default:
            /* Called with an unknown opcode */
            PyErr_Format(PyExc_SystemError,
                         "unexpected binary operation %d on a constant",
                         opcode);
            return 0;
    }
    if (newconst == NULL) {
        PyErr_Clear();
        return 0;
    }
    size = PyObject_Size(newconst);
    if (size == -1)
        PyErr_Clear();
    else if (size > 20) {
        Py_DECREF(newconst);
        return 0;
    }

    /* Append folded constant into consts table */
    len_consts = PyList_GET_SIZE(consts);
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOP NOP NOP NOP LOAD_CONST newconst */
    memset(codestr, NOP, 4);
    codestr[4] = LOAD_CONST;
    SETARG(codestr, 4, len_consts);
    return 1;
}

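/* Replace LOAD_CONST c1 UNARY_OP with LOAD_CONST unary_op(c1).
   As with the binary case above, the consts table must still be in list
   form so the new constant can be appended, and the transformation is
   abandoned if the operation fails.  UNARY_NEGATIVE is only folded for
   true operands so that the sign of -0.0 is preserved at run time. */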
static int
fold_unaryops_on_constants(unsigned char *codestr, PyObject *consts)
{
    PyObject *newconst=NULL, *v;
    Py_ssize_t len_consts;
    int opcode;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[0] == LOAD_CONST);

    /* Create new constant */
    v = PyList_GET_ITEM(consts, GETARG(codestr, 0));
    opcode = codestr[3];
    switch (opcode) {
        case UNARY_NEGATIVE:
            /* Preserve the sign of -0.0 */
            if (PyObject_IsTrue(v) == 1)
                newconst = PyNumber_Negative(v);
            break;
        case UNARY_CONVERT:
            newconst = PyObject_Repr(v);
            break;
        case UNARY_INVERT:
            newconst = PyNumber_Invert(v);
            break;
        default:
            /* Called with an unknown opcode */
            PyErr_Format(PyExc_SystemError,
                         "unexpected unary operation %d on a constant",
                         opcode);
            return 0;
    }
    if (newconst == NULL) {
        PyErr_Clear();
        return 0;
    }

    /* Append folded constant into consts table */
    len_consts = PyList_GET_SIZE(consts);
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOP LOAD_CONST newconst */
    codestr[0] = NOP;
    codestr[1] = LOAD_CONST;
    SETARG(codestr, 1, len_consts);
    return 1;
}

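/* Build an array, parallel to the code string, giving each byte the number
   of the basic block it belongs to.  The first pass marks every jump
   target; the second numbers the blocks by accumulating those marks.
   ISBASICBLOCK() then checks that a byte range lies within one block,
   i.e. contains no jump target other than possibly its first byte. */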
static unsigned int *
markblocks(unsigned char *code, Py_ssize_t len)
{
    unsigned int *blocks = PyMem_New(unsigned int, len);
    int i, j, opcode, blockcnt = 0;

    if (blocks == NULL) {
        PyErr_NoMemory();
        return NULL;
    }
    memset(blocks, 0, len*sizeof(int));

    /* Mark labels in the first pass */
    for (i=0 ; i<len ; i+=CODESIZE(opcode)) {
        opcode = code[i];
        switch (opcode) {
            case FOR_ITER:
            case JUMP_FORWARD:
            case JUMP_IF_FALSE_OR_POP:
            case JUMP_IF_TRUE_OR_POP:
            case POP_JUMP_IF_FALSE:
            case POP_JUMP_IF_TRUE:
            case JUMP_ABSOLUTE:
            case CONTINUE_LOOP:
            case SETUP_LOOP:
            case SETUP_EXCEPT:
            case SETUP_FINALLY:
            case SETUP_WITH:
                j = GETJUMPTGT(code, i);
                blocks[j] = 1;
                break;
        }
    }
    /* Build block numbers in the second pass */
    for (i=0 ; i<len ; i++) {
        blockcnt += blocks[i];          /* increment blockcnt over labels */
        blocks[i] = blockcnt;
    }
    return blocks;
}

/* Perform basic peephole optimizations to components of a code object.
   The consts object should still be in list form to allow new constants
   to be appended.

   To keep the optimizer simple, it bails out (does nothing) for code
   containing extended arguments or that has a length over 32,700.  That
   allows us to avoid overflow and sign issues.  Likewise, it bails when
   the lineno table has complex encoding for gaps >= 255.

   Optimizations are restricted to simple transformations occurring within a
   single basic block.  All transformations keep the code size the same or
   smaller.  For those that reduce size, the gaps are initially filled with
   NOPs.  Later those NOPs are removed and the jump addresses retargeted in
   a single pass.  Line numbering is adjusted accordingly. */
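/* The work below proceeds in three passes over the copied code string:
   1. the main switch applies the per-pattern rewrites within basic blocks,
      padding any shrinkage with NOPs;
   2. addrmap and the lnotab are recomputed to account for the NOPs that
      will be dropped;
   3. the NOPs are squeezed out and every jump argument is retargeted
      through addrmap. */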

PyObject *
PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names,
                PyObject *lineno_obj)
{
    Py_ssize_t i, j, codelen;
    int nops, h, adj;
    int tgt, tgttgt, opcode;
    unsigned char *codestr = NULL;
    unsigned char *lineno;
    int *addrmap = NULL;
    int new_line, cum_orig_line, last_line, tabsiz;
    int cumlc=0, lastlc=0;      /* Count runs of consecutive LOAD_CONSTs */
    unsigned int *blocks = NULL;
    char *name;

    /* Bail out if an exception is set */
    if (PyErr_Occurred())
        goto exitError;

    /* Bypass optimization when the lineno table is too complex */
    assert(PyString_Check(lineno_obj));
    lineno = (unsigned char*)PyString_AS_STRING(lineno_obj);
    tabsiz = PyString_GET_SIZE(lineno_obj);
    if (memchr(lineno, 255, tabsiz) != NULL)
        goto exitUnchanged;

    /* Avoid situations where jump retargeting could overflow */
    assert(PyString_Check(code));
    codelen = PyString_GET_SIZE(code);
    if (codelen > 32700)
        goto exitUnchanged;

    /* Make a modifiable copy of the code string */
    codestr = (unsigned char *)PyMem_Malloc(codelen);
    if (codestr == NULL)
        goto exitError;
    codestr = (unsigned char *)memcpy(codestr,
                                      PyString_AS_STRING(code), codelen);

    /* Verify that RETURN_VALUE terminates the codestring.  This allows
       the various transformation patterns to look ahead several
       instructions without additional checks to make sure they are not
       looking beyond the end of the code string.
    */
    if (codestr[codelen-1] != RETURN_VALUE)
        goto exitUnchanged;

    /* Mapping to new jump targets after NOPs are removed */
    addrmap = PyMem_New(int, codelen);
    if (addrmap == NULL) {
        PyErr_NoMemory();
        goto exitError;
    }

    blocks = markblocks(codestr, codelen);
    if (blocks == NULL)
        goto exitError;
    assert(PyList_Check(consts));

    for (i=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
      reoptimize_current:
        opcode = codestr[i];

        lastlc = cumlc;
        cumlc = 0;

        switch (opcode) {
            /* Replace UNARY_NOT POP_JUMP_IF_FALSE
               with    POP_JUMP_IF_TRUE */
            case UNARY_NOT:
                if (codestr[i+1] != POP_JUMP_IF_FALSE
                    || !ISBASICBLOCK(blocks,i,4))
                    continue;
                j = GETARG(codestr, i+1);
                codestr[i] = POP_JUMP_IF_TRUE;
                SETARG(codestr, i, j);
                codestr[i+3] = NOP;
                goto reoptimize_current;

            /* not a is b -->  a is not b
               not a in b -->  a not in b
               not a is not b -->  a is b
               not a not in b -->  a in b
            */
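            /* The COMPARE_OP argument here is one of the symmetric tests
               6 (in), 7 (not in), 8 (is), 9 (is not); XOR-ing it with 1
               flips each test to its negation, which is why the following
               UNARY_NOT can be dropped. */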
            case COMPARE_OP:
                j = GETARG(codestr, i);
                if (j < 6 || j > 9 ||
                    codestr[i+3] != UNARY_NOT ||
                    !ISBASICBLOCK(blocks,i,4))
                    continue;
                SETARG(codestr, i, (j^1));
                codestr[i+3] = NOP;
                break;

            /* Replace LOAD_GLOBAL/LOAD_NAME None
               with    LOAD_CONST None */
            case LOAD_NAME:
            case LOAD_GLOBAL:
                j = GETARG(codestr, i);
                name = PyString_AsString(PyTuple_GET_ITEM(names, j));
                if (name == NULL || strcmp(name, "None") != 0)
                    continue;
                for (j=0 ; j < PyList_GET_SIZE(consts) ; j++) {
                    if (PyList_GET_ITEM(consts, j) == Py_None)
                        break;
                }
                if (j == PyList_GET_SIZE(consts)) {
                    if (PyList_Append(consts, Py_None) == -1)
                        goto exitError;
                }
                assert(PyList_GET_ITEM(consts, j) == Py_None);
                codestr[i] = LOAD_CONST;
                SETARG(codestr, i, j);
                cumlc = lastlc + 1;
                break;

            /* Skip over LOAD_CONST trueconst
               POP_JUMP_IF_FALSE xx.  This improves
               "while 1" performance. */
            case LOAD_CONST:
                cumlc = lastlc + 1;
                j = GETARG(codestr, i);
                if (codestr[i+3] != POP_JUMP_IF_FALSE  ||
                    !ISBASICBLOCK(blocks,i,6)  ||
                    !PyObject_IsTrue(PyList_GET_ITEM(consts, j)))
                    continue;
                memset(codestr+i, NOP, 6);
                cumlc = 0;
                break;

            /* Try to fold tuples of constants (includes a case for lists
               which are only used for "in" and "not in" tests).
               Skip over  BUILD_SEQN 1 UNPACK_SEQN 1.
               Replace BUILD_SEQN 2 UNPACK_SEQN 2 with ROT2.
               Replace BUILD_SEQN 3 UNPACK_SEQN 3 with ROT3 ROT2. */
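            /* Illustrative example: "a, b = b, a" compiles to
                   LOAD_NAME b  LOAD_NAME a  BUILD_TUPLE 2  UNPACK_SEQUENCE 2
               and the BUILD/UNPACK pair (six bytes) is replaced by a single
               ROT_TWO followed by five NOPs. */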
            case BUILD_TUPLE:
            case BUILD_LIST:
                j = GETARG(codestr, i);
                h = i - 3 * j;
                if (h >= 0 &&
                    j <= lastlc &&
                    ((opcode == BUILD_TUPLE &&
                      ISBASICBLOCK(blocks, h, 3*(j+1))) ||
                     (opcode == BUILD_LIST &&
                      codestr[i+3]==COMPARE_OP &&
                      ISBASICBLOCK(blocks, h, 3*(j+2)) &&
                      (GETARG(codestr,i+3)==6 ||
                       GETARG(codestr,i+3)==7))) &&
                    tuple_of_constants(&codestr[h], j, consts)) {
                    assert(codestr[i] == LOAD_CONST);
                    cumlc = 1;
                    break;
                }
                if (codestr[i+3] != UNPACK_SEQUENCE  ||
                    !ISBASICBLOCK(blocks,i,6) ||
                    j != GETARG(codestr, i+3))
                    continue;
                if (j == 1) {
                    memset(codestr+i, NOP, 6);
                } else if (j == 2) {
                    codestr[i] = ROT_TWO;
                    memset(codestr+i+1, NOP, 5);
                } else if (j == 3) {
                    codestr[i] = ROT_THREE;
                    codestr[i+1] = ROT_TWO;
                    memset(codestr+i+2, NOP, 4);
                }
                break;

            /* Fold binary ops on constants.
               LOAD_CONST c1 LOAD_CONST c2 BINOP -->  LOAD_CONST binop(c1,c2) */
            case BINARY_POWER:
            case BINARY_MULTIPLY:
            case BINARY_TRUE_DIVIDE:
            case BINARY_FLOOR_DIVIDE:
            case BINARY_MODULO:
            case BINARY_ADD:
            case BINARY_SUBTRACT:
            case BINARY_SUBSCR:
            case BINARY_LSHIFT:
            case BINARY_RSHIFT:
            case BINARY_AND:
            case BINARY_XOR:
            case BINARY_OR:
                if (lastlc >= 2 &&
                    ISBASICBLOCK(blocks, i-6, 7) &&
                    fold_binops_on_constants(&codestr[i-6], consts)) {
                    i -= 2;
                    assert(codestr[i] == LOAD_CONST);
                    cumlc = 1;
                }
                break;

            /* Fold unary ops on constants.
               LOAD_CONST c1  UNARY_OP -->  LOAD_CONST unary_op(c1) */
            case UNARY_NEGATIVE:
            case UNARY_CONVERT:
            case UNARY_INVERT:
                if (lastlc >= 1 &&
                    ISBASICBLOCK(blocks, i-3, 4) &&
                    fold_unaryops_on_constants(&codestr[i-3], consts)) {
                    i -= 2;
                    assert(codestr[i] == LOAD_CONST);
                    cumlc = 1;
                }
                break;

            /* Simplify conditional jump to conditional jump where the
               result of the first test implies the success of a similar
               test or the failure of the opposite test.
               Arises in code like:
               "if a and b:"
               "if a or b:"
               "a and b or c"
               "(a and b) and c"
               x:JUMP_IF_FALSE_OR_POP y   y:JUMP_IF_FALSE_OR_POP z
                  -->  x:JUMP_IF_FALSE_OR_POP z
               x:JUMP_IF_FALSE_OR_POP y   y:JUMP_IF_TRUE_OR_POP z
                  -->  x:POP_JUMP_IF_FALSE y+3
               where y+3 is the instruction following the second test.
            */
            case JUMP_IF_FALSE_OR_POP:
            case JUMP_IF_TRUE_OR_POP:
                tgt = GETJUMPTGT(codestr, i);
                j = codestr[tgt];
                if (CONDITIONAL_JUMP(j)) {
                    /* NOTE: all possible jumps here are absolute! */
                    if (JUMPS_ON_TRUE(j) == JUMPS_ON_TRUE(opcode)) {
                        /* The second jump will be
                           taken iff the first is. */
                        tgttgt = GETJUMPTGT(codestr, tgt);
                        /* The current opcode inherits
                           its target's stack behaviour */
                        codestr[i] = j;
                        SETARG(codestr, i, tgttgt);
                        goto reoptimize_current;
                    } else {
                        /* The second jump is not taken if the first is (so
                           jump past it), and all conditional jumps pop their
                           argument when they're not taken (so change the
                           first jump to pop its argument when it's taken). */
                        if (JUMPS_ON_TRUE(opcode))
                            codestr[i] = POP_JUMP_IF_TRUE;
                        else
                            codestr[i] = POP_JUMP_IF_FALSE;
                        SETARG(codestr, i, (tgt + 3));
                        goto reoptimize_current;
                    }
                }
                /* Intentional fallthrough */

            /* Replace jumps to unconditional jumps */
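            /* e.g. a JUMP_ABSOLUTE whose target is itself an unconditional
               jump is retargeted to the final destination, and an
               unconditional jump that lands directly on RETURN_VALUE is
               replaced by RETURN_VALUE itself. */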
            case POP_JUMP_IF_FALSE:
            case POP_JUMP_IF_TRUE:
            case FOR_ITER:
            case JUMP_FORWARD:
            case JUMP_ABSOLUTE:
            case CONTINUE_LOOP:
            case SETUP_LOOP:
            case SETUP_EXCEPT:
            case SETUP_FINALLY:
            case SETUP_WITH:
                tgt = GETJUMPTGT(codestr, i);
                /* Replace JUMP_* to a RETURN into just a RETURN */
                if (UNCONDITIONAL_JUMP(opcode) &&
                    codestr[tgt] == RETURN_VALUE) {
                    codestr[i] = RETURN_VALUE;
                    memset(codestr+i+1, NOP, 2);
                    continue;
                }
                if (!UNCONDITIONAL_JUMP(codestr[tgt]))
                    continue;
                tgttgt = GETJUMPTGT(codestr, tgt);
                if (opcode == JUMP_FORWARD) /* JMP_ABS can go backwards */
                    opcode = JUMP_ABSOLUTE;
                if (!ABSOLUTE_JUMP(opcode))
                    tgttgt -= i + 3;        /* Calc relative jump addr */
                if (tgttgt < 0)             /* No backward relative jumps */
                    continue;
                codestr[i] = opcode;
                SETARG(codestr, i, tgttgt);
                break;

            case EXTENDED_ARG:
                goto exitUnchanged;

            /* Replace RETURN LOAD_CONST None RETURN with just RETURN */
            /* Remove unreachable JUMPs after RETURN */
            case RETURN_VALUE:
                if (i+4 >= codelen)
                    continue;
                if (codestr[i+4] == RETURN_VALUE &&
                    ISBASICBLOCK(blocks,i,5))
                    memset(codestr+i+1, NOP, 4);
                else if (UNCONDITIONAL_JUMP(codestr[i+1]) &&
                         ISBASICBLOCK(blocks,i,4))
                    memset(codestr+i+1, NOP, 3);
                break;
        }
    }

    /* Fixup linenotab */
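    /* addrmap[i] will give the new offset of the instruction currently at
       offset i once the NOPs are dropped.  The lnotab is a sequence of
       (bytecode delta, line delta) byte pairs; only its bytecode deltas
       change here, and because deltas can only shrink when NOPs are
       removed, the earlier memchr(255) check keeps each adjusted delta
       within a single byte. */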
    for (i=0, nops=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
        addrmap[i] = i - nops;
        if (codestr[i] == NOP)
            nops++;
    }
    cum_orig_line = 0;
    last_line = 0;
    for (i=0 ; i < tabsiz ; i+=2) {
        cum_orig_line += lineno[i];
        new_line = addrmap[cum_orig_line];
        assert (new_line - last_line < 255);
        lineno[i] =((unsigned char)(new_line - last_line));
        last_line = new_line;
    }

    /* Remove NOPs and fixup jump targets */
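    /* Instructions are copied down over the NOP gaps (h trails i).  Absolute
       jump arguments are mapped directly through addrmap; relative jump
       arguments are recomputed as the distance from the remapped instruction
       that follows the jump to the remapped target. */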
    for (i=0, h=0 ; i<codelen ; ) {
        opcode = codestr[i];
        switch (opcode) {
            case NOP:
                i++;
                continue;

            case JUMP_ABSOLUTE:
            case CONTINUE_LOOP:
            case POP_JUMP_IF_FALSE:
            case POP_JUMP_IF_TRUE:
            case JUMP_IF_FALSE_OR_POP:
            case JUMP_IF_TRUE_OR_POP:
                j = addrmap[GETARG(codestr, i)];
                SETARG(codestr, i, j);
                break;

            case FOR_ITER:
            case JUMP_FORWARD:
            case SETUP_LOOP:
            case SETUP_EXCEPT:
            case SETUP_FINALLY:
            case SETUP_WITH:
                j = addrmap[GETARG(codestr, i) + i + 3] - addrmap[i] - 3;
                SETARG(codestr, i, j);
                break;
        }
        adj = CODESIZE(opcode);
        while (adj--)
            codestr[h++] = codestr[i++];
    }
    assert(h + nops == codelen);

    code = PyString_FromStringAndSize((char *)codestr, h);
    PyMem_Free(addrmap);
    PyMem_Free(codestr);
    PyMem_Free(blocks);
    return code;

 exitError:
    code = NULL;

 exitUnchanged:
    if (blocks != NULL)
        PyMem_Free(blocks);
    if (addrmap != NULL)
        PyMem_Free(addrmap);
    if (codestr != NULL)
        PyMem_Free(codestr);
    Py_XINCREF(code);
    return code;
}