edk2: Remove AppPkg, StdLib, StdLibPrivateInternalFiles
diff --git a/AppPkg/Applications/Python/Python-2.7.10/Python/peephole.c b/AppPkg/Applications/Python/Python-2.7.10/Python/peephole.c
deleted file mode 100644
index 54404a3..0000000
--- a/AppPkg/Applications/Python/Python-2.7.10/Python/peephole.c
+++ /dev/null
@@ -1,667 +0,0 @@
-/* Peephole optimizations for bytecode compiler. */
-
-#include "Python.h"
-
-#include "Python-ast.h"
-#include "node.h"
-#include "pyarena.h"
-#include "ast.h"
-#include "code.h"
-#include "compile.h"
-#include "symtable.h"
-#include "opcode.h"
-
-#define GETARG(arr, i) ((int)((arr[i+2]<<8) + arr[i+1]))
-#define UNCONDITIONAL_JUMP(op)  (op==JUMP_ABSOLUTE || op==JUMP_FORWARD)
-#define CONDITIONAL_JUMP(op) (op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \
-    || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP)
-#define ABSOLUTE_JUMP(op) (op==JUMP_ABSOLUTE || op==CONTINUE_LOOP \
-    || op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \
-    || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP)
-#define JUMPS_ON_TRUE(op) (op==POP_JUMP_IF_TRUE || op==JUMP_IF_TRUE_OR_POP)
-#define GETJUMPTGT(arr, i) (GETARG(arr,i) + (ABSOLUTE_JUMP(arr[i]) ? 0 : i+3))
-#define SETARG(arr, i, val) arr[i+2] = val>>8; arr[i+1] = val & 255
-#define CODESIZE(op)  (HAS_ARG(op) ? 3 : 1)
-#define ISBASICBLOCK(blocks, start, bytes) \
-    (blocks[start]==blocks[start+bytes-1])
-
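-/* Worked example (editor's sketch, not part of the original file): for the
-   3-byte instruction {JUMP_FORWARD, 0x05, 0x00} stored at offset i == 0,
-   GETARG(arr, 0) == (0x00<<8) + 0x05 == 5, and since JUMP_FORWARD takes a
-   relative target, GETJUMPTGT(arr, 0) == 5 + 0 + 3 == 8.  Note that SETARG
-   expands to two statements, so it must not be used as the bare body of an
-   if/else; this file always uses it as a standalone statement. */
-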
-/* Replace LOAD_CONST c1, LOAD_CONST c2 ... LOAD_CONST cn BUILD_TUPLE n
-   with    LOAD_CONST (c1, c2, ... cn).
-   The consts table must still be in list form so that the
-   new constant (c1, c2, ... cn) can be appended.
-   Called with codestr pointing to the first LOAD_CONST.
-   Bails out with no change if one or more of the LOAD_CONSTs is missing.
-   Also works for BUILD_LIST when followed by an "in" or "not in" test.
-*/
-static int
-tuple_of_constants(unsigned char *codestr, Py_ssize_t n, PyObject *consts)
-{
-    PyObject *newconst, *constant;
-    Py_ssize_t i, arg, len_consts;
-
-    /* Pre-conditions */
-    assert(PyList_CheckExact(consts));
-    assert(codestr[n*3] == BUILD_TUPLE || codestr[n*3] == BUILD_LIST);
-    assert(GETARG(codestr, (n*3)) == n);
-    for (i=0 ; i<n ; i++)
-        assert(codestr[i*3] == LOAD_CONST);
-
-    /* Build up the new tuple of constants */
-    newconst = PyTuple_New(n);
-    if (newconst == NULL)
-        return 0;
-    len_consts = PyList_GET_SIZE(consts);
-    for (i=0 ; i<n ; i++) {
-        arg = GETARG(codestr, (i*3));
-        assert(arg < len_consts);
-        constant = PyList_GET_ITEM(consts, arg);
-        Py_INCREF(constant);
-        PyTuple_SET_ITEM(newconst, i, constant);
-    }
-
-    /* Append folded constant onto consts */
-    if (PyList_Append(consts, newconst)) {
-        Py_DECREF(newconst);
-        return 0;
-    }
-    Py_DECREF(newconst);
-
-    /* Write NOPs over the old LOAD_CONSTs and
-       add a new LOAD_CONST newconst on top of the BUILD_TUPLE n */
-    memset(codestr, NOP, n*3);
-    codestr[n*3] = LOAD_CONST;
-    SETARG(codestr, (n*3), len_consts);
-    return 1;
-}
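-
-/* Illustrative sketch (editor's example, not in the original source): for
-   "x = (1, 2, 3)" the compiler emits
-       LOAD_CONST c1  LOAD_CONST c2  LOAD_CONST c3  BUILD_TUPLE 3
-   (12 bytes), which tuple_of_constants() rewrites in place to
-       NOP*9  LOAD_CONST k
-   (also 12 bytes), where k indexes the (1, 2, 3) tuple just appended to
-   consts; the NOPs are squeezed out by the final pass of PyCode_Optimize(). */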
-
-/* Replace LOAD_CONST c1, LOAD_CONST c2 BINOP
-   with    LOAD_CONST binop(c1,c2)
-   The consts table must still be in list form so that the
-   new constant can be appended.
-   Called with codestr pointing to the first LOAD_CONST.
-   Abandons the transformation if the folding fails (e.g. 1+'a').
-   If the new constant is a sequence, only folds when the size
-   is below a threshold value.  That keeps pyc files from
-   becoming large in the presence of code like:  (None,)*1000.
-*/
-static int
-fold_binops_on_constants(unsigned char *codestr, PyObject *consts)
-{
-    PyObject *newconst, *v, *w;
-    Py_ssize_t len_consts, size;
-    int opcode;
-
-    /* Pre-conditions */
-    assert(PyList_CheckExact(consts));
-    assert(codestr[0] == LOAD_CONST);
-    assert(codestr[3] == LOAD_CONST);
-
-    /* Create new constant */
-    v = PyList_GET_ITEM(consts, GETARG(codestr, 0));
-    w = PyList_GET_ITEM(consts, GETARG(codestr, 3));
-    opcode = codestr[6];
-    switch (opcode) {
-        case BINARY_POWER:
-            newconst = PyNumber_Power(v, w, Py_None);
-            break;
-        case BINARY_MULTIPLY:
-            newconst = PyNumber_Multiply(v, w);
-            break;
-        case BINARY_DIVIDE:
-            /* Cannot fold this operation statically since
-               the result can depend on the run-time presence
-               of the -Qnew flag */
-            return 0;
-        case BINARY_TRUE_DIVIDE:
-            newconst = PyNumber_TrueDivide(v, w);
-            break;
-        case BINARY_FLOOR_DIVIDE:
-            newconst = PyNumber_FloorDivide(v, w);
-            break;
-        case BINARY_MODULO:
-            newconst = PyNumber_Remainder(v, w);
-            break;
-        case BINARY_ADD:
-            newconst = PyNumber_Add(v, w);
-            break;
-        case BINARY_SUBTRACT:
-            newconst = PyNumber_Subtract(v, w);
-            break;
-        case BINARY_SUBSCR:
-            /* #5057: if v is unicode, there might be differences between
-               wide and narrow builds in cases like '\U00012345'[0] or
-               '\U00012345abcdef'[3], so it's better to skip the optimization
-               in order to produce compatible pycs.
-            */
-            if (PyUnicode_Check(v))
-                return 0;
-            newconst = PyObject_GetItem(v, w);
-            break;
-        case BINARY_LSHIFT:
-            newconst = PyNumber_Lshift(v, w);
-            break;
-        case BINARY_RSHIFT:
-            newconst = PyNumber_Rshift(v, w);
-            break;
-        case BINARY_AND:
-            newconst = PyNumber_And(v, w);
-            break;
-        case BINARY_XOR:
-            newconst = PyNumber_Xor(v, w);
-            break;
-        case BINARY_OR:
-            newconst = PyNumber_Or(v, w);
-            break;
-        default:
-            /* Called with an unknown opcode */
-            PyErr_Format(PyExc_SystemError,
-                 "unexpected binary operation %d on a constant",
-                     opcode);
-            return 0;
-    }
-    if (newconst == NULL) {
-        PyErr_Clear();
-        return 0;
-    }
-    size = PyObject_Size(newconst);
-    if (size == -1)
-        PyErr_Clear();
-    else if (size > 20) {
-        Py_DECREF(newconst);
-        return 0;
-    }
-
-    /* Append folded constant into consts table */
-    len_consts = PyList_GET_SIZE(consts);
-    if (PyList_Append(consts, newconst)) {
-        Py_DECREF(newconst);
-        return 0;
-    }
-    Py_DECREF(newconst);
-
-    /* Write NOP NOP NOP NOP LOAD_CONST newconst */
-    memset(codestr, NOP, 4);
-    codestr[4] = LOAD_CONST;
-    SETARG(codestr, 4, len_consts);
-    return 1;
-}
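-
-/* Illustrative sketch (editor's example, not in the original source):
-   "2 * 3" compiles to
-       LOAD_CONST c1  LOAD_CONST c2  BINARY_MULTIPLY     (7 bytes)
-   and is rewritten in place to
-       NOP NOP NOP NOP  LOAD_CONST k                     (7 bytes)
-   where k indexes the folded constant 6.  The size > 20 check above is what
-   keeps expressions like (None,)*1000 from being folded into huge constants. */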
-
-static int
-fold_unaryops_on_constants(unsigned char *codestr, PyObject *consts)
-{
-    PyObject *newconst=NULL, *v;
-    Py_ssize_t len_consts;
-    int opcode;
-
-    /* Pre-conditions */
-    assert(PyList_CheckExact(consts));
-    assert(codestr[0] == LOAD_CONST);
-
-    /* Create new constant */
-    v = PyList_GET_ITEM(consts, GETARG(codestr, 0));
-    opcode = codestr[3];
-    switch (opcode) {
-        case UNARY_NEGATIVE:
-            /* Preserve the sign of -0.0 */
-            if (PyObject_IsTrue(v) == 1)
-                newconst = PyNumber_Negative(v);
-            break;
-        case UNARY_CONVERT:
-            newconst = PyObject_Repr(v);
-            break;
-        case UNARY_INVERT:
-            newconst = PyNumber_Invert(v);
-            break;
-        default:
-            /* Called with an unknown opcode */
-            PyErr_Format(PyExc_SystemError,
-                 "unexpected unary operation %d on a constant",
-                     opcode);
-            return 0;
-    }
-    if (newconst == NULL) {
-        PyErr_Clear();
-        return 0;
-    }
-
-    /* Append folded constant into consts table */
-    len_consts = PyList_GET_SIZE(consts);
-    if (PyList_Append(consts, newconst)) {
-        Py_DECREF(newconst);
-        return 0;
-    }
-    Py_DECREF(newconst);
-
-    /* Write NOP LOAD_CONST newconst */
-    codestr[0] = NOP;
-    codestr[1] = LOAD_CONST;
-    SETARG(codestr, 1, len_consts);
-    return 1;
-}
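-
-/* Illustrative sketch (editor's example, not in the original source):
-   "~5" compiles to
-       LOAD_CONST c1  UNARY_INVERT      (4 bytes)
-   and is rewritten to
-       NOP  LOAD_CONST k                (4 bytes)
-   with -6 appended to consts.  UNARY_NEGATIVE is folded only when the
-   operand is truthy, so -0.0 is left to run-time negation and keeps its
-   sign. */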
-
-static unsigned int *
-markblocks(unsigned char *code, Py_ssize_t len)
-{
-    unsigned int *blocks = PyMem_New(unsigned int, len);
-    int i,j, opcode, blockcnt = 0;
-
-    if (blocks == NULL) {
-        PyErr_NoMemory();
-        return NULL;
-    }
-    memset(blocks, 0, len*sizeof(int));
-
-    /* Mark labels in the first pass */
-    for (i=0 ; i<len ; i+=CODESIZE(opcode)) {
-        opcode = code[i];
-        switch (opcode) {
-            case FOR_ITER:
-            case JUMP_FORWARD:
-            case JUMP_IF_FALSE_OR_POP:
-            case JUMP_IF_TRUE_OR_POP:
-            case POP_JUMP_IF_FALSE:
-            case POP_JUMP_IF_TRUE:
-            case JUMP_ABSOLUTE:
-            case CONTINUE_LOOP:
-            case SETUP_LOOP:
-            case SETUP_EXCEPT:
-            case SETUP_FINALLY:
-            case SETUP_WITH:
-                j = GETJUMPTGT(code, i);
-                blocks[j] = 1;
-                break;
-        }
-    }
-    /* Build block numbers in the second pass */
-    for (i=0 ; i<len ; i++) {
-        blockcnt += blocks[i];          /* increment blockcnt over labels */
-        blocks[i] = blockcnt;
-    }
-    return blocks;
-}
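-
-/* Illustrative sketch (editor's example, not in the original source): if
-   the only jump in a 12-byte code string is a conditional jump at offset 0
-   targeting offset 9, pass one sets blocks[9] = 1 and pass two yields
-       blocks = { 0,0,0,0,0,0,0,0,0, 1,1,1 }
-   so ISBASICBLOCK() holds exactly for ranges that do not straddle the jump
-   target at offset 9. */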
-
-/* Perform basic peephole optimizations to components of a code object.
-   The consts object should still be in list form to allow new constants
-   to be appended.
-
-   To keep the optimizer simple, it bails out (does nothing) for code
-   containing extended arguments or that has a length over 32,700.  That
-   allows us to avoid overflow and sign issues.  Likewise, it bails when
-   the lineno table has complex encoding for gaps >= 255.
-
-   Optimizations are restricted to simple transformations occurring within a
-   single basic block.  All transformations keep the code size the same or
-   smaller.  For those that reduce size, the gaps are initially filled with
-   NOPs.  Later those NOPs are removed and the jump addresses retargeted in
-   a single pass.  Line numbering is adjusted accordingly. */
-
-PyObject *
-PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names,
-                PyObject *lineno_obj)
-{
-    Py_ssize_t i, j, codelen;
-    int nops, h, adj;
-    int tgt, tgttgt, opcode;
-    unsigned char *codestr = NULL;
-    unsigned char *lineno;
-    int *addrmap = NULL;
-    int new_line, cum_orig_line, last_line, tabsiz;
-    int cumlc=0, lastlc=0;      /* Count runs of consecutive LOAD_CONSTs */
-    unsigned int *blocks = NULL;
-    char *name;
-
-    /* Bail out if an exception is set */
-    if (PyErr_Occurred())
-        goto exitError;
-
-    /* Bypass optimization when the lineno table is too complex */
-    assert(PyString_Check(lineno_obj));
-    lineno = (unsigned char*)PyString_AS_STRING(lineno_obj);
-    tabsiz = PyString_GET_SIZE(lineno_obj);
-    if (memchr(lineno, 255, tabsiz) != NULL)
-        goto exitUnchanged;
-
-    /* Avoid situations where jump retargeting could overflow */
-    assert(PyString_Check(code));
-    codelen = PyString_GET_SIZE(code);
-    if (codelen > 32700)
-        goto exitUnchanged;
-
-    /* Make a modifiable copy of the code string */
-    codestr = (unsigned char *)PyMem_Malloc(codelen);
-    if (codestr == NULL)
-        goto exitError;
-    codestr = (unsigned char *)memcpy(codestr,
-                                      PyString_AS_STRING(code), codelen);
-
-    /* Verify that RETURN_VALUE terminates the codestring. This allows
-       the various transformation patterns to look ahead several
-       instructions without additional checks to make sure they are not
-       looking beyond the end of the code string.
-    */
-    if (codestr[codelen-1] != RETURN_VALUE)
-        goto exitUnchanged;
-
-    /* Mapping to new jump targets after NOPs are removed */
-    addrmap = PyMem_New(int, codelen);
-    if (addrmap == NULL) {
-        PyErr_NoMemory();
-        goto exitError;
-    }
-
-    blocks = markblocks(codestr, codelen);
-    if (blocks == NULL)
-        goto exitError;
-    assert(PyList_Check(consts));
-
-    for (i=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
-      reoptimize_current:
-        opcode = codestr[i];
-
-        lastlc = cumlc;
-        cumlc = 0;
-
-        switch (opcode) {
-            /* Replace UNARY_NOT POP_JUMP_IF_FALSE
-               with    POP_JUMP_IF_TRUE */
-            case UNARY_NOT:
-                if (codestr[i+1] != POP_JUMP_IF_FALSE
-                    || !ISBASICBLOCK(blocks,i,4))
-                    continue;
-                j = GETARG(codestr, i+1);
-                codestr[i] = POP_JUMP_IF_TRUE;
-                SETARG(codestr, i, j);
-                codestr[i+3] = NOP;
-                goto reoptimize_current;
-
-                /* not a is b -->  a is not b
-                   not a in b -->  a not in b
-                   not a is not b -->  a is b
-                   not a not in b -->  a in b
-                */
-            case COMPARE_OP:
-                j = GETARG(codestr, i);
-                if (j < 6  ||  j > 9  ||
-                    codestr[i+3] != UNARY_NOT  ||
-                    !ISBASICBLOCK(blocks,i,4))
-                    continue;
-                SETARG(codestr, i, (j^1));
-                codestr[i+3] = NOP;
-                break;
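-
-                /* Editor's note (not in the original source): in CPython
-                   2.7 the comparison table is ('<','<=','==','!=','>','>=',
-                   'in','not in','is','is not',...), so arguments 6..9 form
-                   negation pairs and j^1 flips each test: 6^1==7 turns "in"
-                   into "not in" and 8^1==9 turns "is" into "is not". */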
-
-                /* Replace LOAD_GLOBAL/LOAD_NAME None
-                   with LOAD_CONST None */
-            case LOAD_NAME:
-            case LOAD_GLOBAL:
-                j = GETARG(codestr, i);
-                name = PyString_AsString(PyTuple_GET_ITEM(names, j));
-                if (name == NULL  ||  strcmp(name, "None") != 0)
-                    continue;
-                for (j=0 ; j < PyList_GET_SIZE(consts) ; j++) {
-                    if (PyList_GET_ITEM(consts, j) == Py_None)
-                        break;
-                }
-                if (j == PyList_GET_SIZE(consts)) {
-                    if (PyList_Append(consts, Py_None) == -1)
-                        goto exitError;
-                }
-                assert(PyList_GET_ITEM(consts, j) == Py_None);
-                codestr[i] = LOAD_CONST;
-                SETARG(codestr, i, j);
-                cumlc = lastlc + 1;
-                break;
-
-                /* Skip over LOAD_CONST trueconst
-                   POP_JUMP_IF_FALSE xx. This improves
-                   "while 1" performance. */
-            case LOAD_CONST:
-                cumlc = lastlc + 1;
-                j = GETARG(codestr, i);
-                if (codestr[i+3] != POP_JUMP_IF_FALSE  ||
-                    !ISBASICBLOCK(blocks,i,6)  ||
-                    !PyObject_IsTrue(PyList_GET_ITEM(consts, j)))
-                    continue;
-                memset(codestr+i, NOP, 6);
-                cumlc = 0;
-                break;
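-
-                /* Editor's note (not in the original source): "while 1:"
-                   emits LOAD_CONST 1 followed by POP_JUMP_IF_FALSE at the
-                   loop head; both become NOPs here, removing the constant
-                   truth test from every iteration. */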
-
-                /* Try to fold tuples of constants (includes a case for lists
-                   which are only used for "in" and "not in" tests).
-                   Skip over BUILD_SEQN 1 UNPACK_SEQN 1.
-                   Replace BUILD_SEQN 2 UNPACK_SEQN 2 with ROT2.
-                   Replace BUILD_SEQN 3 UNPACK_SEQN 3 with ROT3 ROT2. */
-            case BUILD_TUPLE:
-            case BUILD_LIST:
-                j = GETARG(codestr, i);
-                h = i - 3 * j;
-                if (h >= 0 &&
-                    j <= lastlc &&
-                    ((opcode == BUILD_TUPLE &&
-                      ISBASICBLOCK(blocks, h, 3*(j+1))) ||
-                     (opcode == BUILD_LIST &&
-                      codestr[i+3]==COMPARE_OP &&
-                      ISBASICBLOCK(blocks, h, 3*(j+2)) &&
-                      (GETARG(codestr,i+3)==6 ||
-                       GETARG(codestr,i+3)==7))) &&
-                    tuple_of_constants(&codestr[h], j, consts)) {
-                    assert(codestr[i] == LOAD_CONST);
-                    cumlc = 1;
-                    break;
-                }
-                if (codestr[i+3] != UNPACK_SEQUENCE  ||
-                    !ISBASICBLOCK(blocks,i,6) ||
-                    j != GETARG(codestr, i+3))
-                    continue;
-                if (j == 1) {
-                    memset(codestr+i, NOP, 6);
-                } else if (j == 2) {
-                    codestr[i] = ROT_TWO;
-                    memset(codestr+i+1, NOP, 5);
-                } else if (j == 3) {
-                    codestr[i] = ROT_THREE;
-                    codestr[i+1] = ROT_TWO;
-                    memset(codestr+i+2, NOP, 4);
-                }
-                break;
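-
-                /* Editor's note (not in the original source): "a, b = b, a"
-                   emits BUILD_TUPLE 2  UNPACK_SEQUENCE 2 (6 bytes), which is
-                   replaced by a single ROT_TWO plus five NOPs, swapping the
-                   two stack items directly. */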
-
-                /* Fold binary ops on constants.
-                   LOAD_CONST c1 LOAD_CONST c2 BINOP -->  LOAD_CONST binop(c1,c2) */
-            case BINARY_POWER:
-            case BINARY_MULTIPLY:
-            case BINARY_TRUE_DIVIDE:
-            case BINARY_FLOOR_DIVIDE:
-            case BINARY_MODULO:
-            case BINARY_ADD:
-            case BINARY_SUBTRACT:
-            case BINARY_SUBSCR:
-            case BINARY_LSHIFT:
-            case BINARY_RSHIFT:
-            case BINARY_AND:
-            case BINARY_XOR:
-            case BINARY_OR:
-                if (lastlc >= 2 &&
-                    ISBASICBLOCK(blocks, i-6, 7) &&
-                    fold_binops_on_constants(&codestr[i-6], consts)) {
-                    i -= 2;
-                    assert(codestr[i] == LOAD_CONST);
-                    cumlc = 1;
-                }
-                break;
-
-                /* Fold unary ops on constants.
-                   LOAD_CONST c1  UNARY_OP --> LOAD_CONST unary_op(c1) */
-            case UNARY_NEGATIVE:
-            case UNARY_CONVERT:
-            case UNARY_INVERT:
-                if (lastlc >= 1 &&
-                    ISBASICBLOCK(blocks, i-3, 4) &&
-                    fold_unaryops_on_constants(&codestr[i-3], consts)) {
-                    i -= 2;
-                    assert(codestr[i] == LOAD_CONST);
-                    cumlc = 1;
-                }
-                break;
-
-                /* Simplify conditional jump to conditional jump where the
-                   result of the first test implies the success of a similar
-                   test or the failure of the opposite test.
-                   Arises in code like:
-                   "if a and b:"
-                   "if a or b:"
-                   "a and b or c"
-                   "(a and b) and c"
-                   x:JUMP_IF_FALSE_OR_POP y   y:JUMP_IF_FALSE_OR_POP z
-                      -->  x:JUMP_IF_FALSE_OR_POP z
-                   x:JUMP_IF_FALSE_OR_POP y   y:JUMP_IF_TRUE_OR_POP z
-                      -->  x:POP_JUMP_IF_FALSE y+3
-                   where y+3 is the instruction following the second test.
-                */
-            case JUMP_IF_FALSE_OR_POP:
-            case JUMP_IF_TRUE_OR_POP:
-                tgt = GETJUMPTGT(codestr, i);
-                j = codestr[tgt];
-                if (CONDITIONAL_JUMP(j)) {
-                    /* NOTE: all possible jumps here are absolute! */
-                    if (JUMPS_ON_TRUE(j) == JUMPS_ON_TRUE(opcode)) {
-                        /* The second jump will be
-                           taken iff the first is. */
-                        tgttgt = GETJUMPTGT(codestr, tgt);
-                        /* The current opcode inherits
-                           its target's stack behaviour */
-                        codestr[i] = j;
-                        SETARG(codestr, i, tgttgt);
-                        goto reoptimize_current;
-                    } else {
-                        /* The second jump is not taken if the first is (so
-                           jump past it), and all conditional jumps pop their
-                           argument when they're not taken (so change the
-                           first jump to pop its argument when it's taken). */
-                        if (JUMPS_ON_TRUE(opcode))
-                            codestr[i] = POP_JUMP_IF_TRUE;
-                        else
-                            codestr[i] = POP_JUMP_IF_FALSE;
-                        SETARG(codestr, i, (tgt + 3));
-                        goto reoptimize_current;
-                    }
-                }
-                /* Intentional fallthrough */
-
-                /* Replace jumps to unconditional jumps */
-            case POP_JUMP_IF_FALSE:
-            case POP_JUMP_IF_TRUE:
-            case FOR_ITER:
-            case JUMP_FORWARD:
-            case JUMP_ABSOLUTE:
-            case CONTINUE_LOOP:
-            case SETUP_LOOP:
-            case SETUP_EXCEPT:
-            case SETUP_FINALLY:
-            case SETUP_WITH:
-                tgt = GETJUMPTGT(codestr, i);
-                /* Replace JUMP_* to a RETURN into just a RETURN */
-                if (UNCONDITIONAL_JUMP(opcode) &&
-                    codestr[tgt] == RETURN_VALUE) {
-                    codestr[i] = RETURN_VALUE;
-                    memset(codestr+i+1, NOP, 2);
-                    continue;
-                }
-                if (!UNCONDITIONAL_JUMP(codestr[tgt]))
-                    continue;
-                tgttgt = GETJUMPTGT(codestr, tgt);
-                if (opcode == JUMP_FORWARD) /* JMP_ABS can go backwards */
-                    opcode = JUMP_ABSOLUTE;
-                if (!ABSOLUTE_JUMP(opcode))
-                    tgttgt -= i + 3;        /* Calc relative jump addr */
-                if (tgttgt < 0)             /* No backward relative jumps */
-                    continue;
-                codestr[i] = opcode;
-                SETARG(codestr, i, tgttgt);
-                break;
-
-            case EXTENDED_ARG:
-                goto exitUnchanged;
-
-                /* Replace RETURN LOAD_CONST None RETURN with just RETURN */
-                /* Remove unreachable JUMPs after RETURN */
-            case RETURN_VALUE:
-                if (i+4 >= codelen)
-                    continue;
-                if (codestr[i+4] == RETURN_VALUE &&
-                    ISBASICBLOCK(blocks,i,5))
-                    memset(codestr+i+1, NOP, 4);
-                else if (UNCONDITIONAL_JUMP(codestr[i+1]) &&
-                         ISBASICBLOCK(blocks,i,4))
-                    memset(codestr+i+1, NOP, 3);
-                break;
-        }
-    }
-
-    /* Fixup linenotab */
-    for (i=0, nops=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
-        addrmap[i] = i - nops;
-        if (codestr[i] == NOP)
-            nops++;
-    }
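-
-    /* Editor's note (not in the original source): addrmap maps old offsets
-       to post-compaction offsets.  If, say, bytes 0..5 are all NOPs, then
-       addrmap[6] == 0 and every later offset shifts down by the number of
-       NOPs that precede it. */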
-    cum_orig_line = 0;
-    last_line = 0;
-    for (i=0 ; i < tabsiz ; i+=2) {
-        cum_orig_line += lineno[i];
-        new_line = addrmap[cum_orig_line];
-        assert (new_line - last_line < 255);
-        lineno[i] =((unsigned char)(new_line - last_line));
-        last_line = new_line;
-    }
-
-    /* Remove NOPs and fixup jump targets */
-    for (i=0, h=0 ; i<codelen ; ) {
-        opcode = codestr[i];
-        switch (opcode) {
-            case NOP:
-                i++;
-                continue;
-
-            case JUMP_ABSOLUTE:
-            case CONTINUE_LOOP:
-            case POP_JUMP_IF_FALSE:
-            case POP_JUMP_IF_TRUE:
-            case JUMP_IF_FALSE_OR_POP:
-            case JUMP_IF_TRUE_OR_POP:
-                j = addrmap[GETARG(codestr, i)];
-                SETARG(codestr, i, j);
-                break;
-
-            case FOR_ITER:
-            case JUMP_FORWARD:
-            case SETUP_LOOP:
-            case SETUP_EXCEPT:
-            case SETUP_FINALLY:
-            case SETUP_WITH:
-                j = addrmap[GETARG(codestr, i) + i + 3] - addrmap[i] - 3;
-                SETARG(codestr, i, j);
-                break;
-        }
-        adj = CODESIZE(opcode);
-        while (adj--)
-            codestr[h++] = codestr[i++];
-    }
-    assert(h + nops == codelen);
-
-    code = PyString_FromStringAndSize((char *)codestr, h);
-    PyMem_Free(addrmap);
-    PyMem_Free(codestr);
-    PyMem_Free(blocks);
-    return code;
-
- exitError:
-    code = NULL;
-
- exitUnchanged:
-    if (blocks != NULL)
-        PyMem_Free(blocks);
-    if (addrmap != NULL)
-        PyMem_Free(addrmap);
-    if (codestr != NULL)
-        PyMem_Free(codestr);
-    Py_XINCREF(code);
-    return code;
-}