1 /*
2 * This file compiles an abstract syntax tree (AST) into Python bytecode.
3 *
4 * The primary entry point is PyAST_Compile(), which returns a
5 * PyCodeObject. The compiler makes several passes to build the code
6 * object:
7 * 1. Checks for future statements. See future.c
8 * 2. Builds a symbol table. See symtable.c.
9 * 3. Generates code for basic blocks. See compiler_mod() in this file.
10 * 4. Assembles the basic blocks into final code. See assemble() in
11 * this file.
12 * 5. Optimizes the byte code (peephole optimizations). See peephole.c
13 *
14 * Note that compiler_mod() suggests module, but the module ast type
15 * (mod_ty) has cases for expressions and interactive statements.
16 *
17 * CAUTION: The VISIT_* macros abort the current function when they
18 * encounter a problem. So don't invoke them when there is memory
19 * which needs to be released. Code blocks are OK, as the compiler
20 * structure takes care of releasing those. Use the arena to manage
21 * objects.
22 */
23
24 #include "Python.h"
25
26 #include "Python-ast.h"
27 #include "node.h"
28 #include "pyarena.h"
29 #include "ast.h"
30 #include "code.h"
31 #include "compile.h"
32 #include "symtable.h"
33 #include "opcode.h"
34
35 int Py_OptimizeFlag = 0;
36
37 #define DEFAULT_BLOCK_SIZE 16
38 #define DEFAULT_BLOCKS 8
39 #define DEFAULT_CODE_SIZE 128
40 #define DEFAULT_LNOTAB_SIZE 16
41
42 #define COMP_GENEXP 0
43 #define COMP_SETCOMP 1
44 #define COMP_DICTCOMP 2
45
46 struct instr {
47 unsigned i_jabs : 1;
48 unsigned i_jrel : 1;
49 unsigned i_hasarg : 1;
50 unsigned char i_opcode;
51 int i_oparg;
52 struct basicblock_ *i_target; /* target block (if jump instruction) */
53 int i_lineno;
54 };
55
56 typedef struct basicblock_ {
57 /* Each basicblock in a compilation unit is linked via b_list in the
58 reverse order that the blocks are allocated. b_list points to the next
59 block, not to be confused with b_next, which is next by control flow. */
60 struct basicblock_ *b_list;
61 /* number of instructions used */
62 int b_iused;
63 /* length of instruction array (b_instr) */
64 int b_ialloc;
65 /* pointer to an array of instructions, initially NULL */
66 struct instr *b_instr;
67 /* If b_next is non-NULL, it is a pointer to the next
68 block reached by normal control flow. */
69 struct basicblock_ *b_next;
70 /* b_seen is used to perform a DFS of basicblocks. */
71 unsigned b_seen : 1;
72 /* b_return is true if a RETURN_VALUE opcode is inserted. */
73 unsigned b_return : 1;
74 /* depth of stack upon entry of block, computed by stackdepth() */
75 int b_startdepth;
76 /* instruction offset for block, computed by assemble_jump_offsets() */
77 int b_offset;
78 } basicblock;
79
80 /* fblockinfo tracks the current frame block.
81
82 A frame block is used to handle loops, try/except, and try/finally.
83 It's called a frame block to distinguish it from a basic block in the
84 compiler IR.
85 */
86
87 enum fblocktype { LOOP, EXCEPT, FINALLY_TRY, FINALLY_END };
88
89 struct fblockinfo {
90 enum fblocktype fb_type;
91 basicblock *fb_block;
92 };
93
94 /* The following items change on entry and exit of code blocks.
95 They must be saved and restored when returning to a block.
96 */
97 struct compiler_unit {
98 PySTEntryObject *u_ste;
99
100 PyObject *u_name;
101 /* The following fields are dicts that map objects to
102 their index in the corresponding co_XXX tuple. The index is used as
103 the argument for opcodes that refer to those collections.
104 */
105 PyObject *u_consts; /* all constants */
106 PyObject *u_names; /* all names */
107 PyObject *u_varnames; /* local variables */
108 PyObject *u_cellvars; /* cell variables */
109 PyObject *u_freevars; /* free variables */
110
111 PyObject *u_private; /* for private name mangling */
112
113 int u_argcount; /* number of arguments for block */
114 /* Pointer to the most recently allocated block. By following b_list
115 members, you can reach all previously allocated blocks. */
116 basicblock *u_blocks;
117 basicblock *u_curblock; /* pointer to current block */
118
119 int u_nfblocks;
120 struct fblockinfo u_fblock[CO_MAXBLOCKS];
121
122 int u_firstlineno; /* the first lineno of the block */
123 int u_lineno; /* the lineno for the current stmt */
124 bool u_lineno_set; /* boolean to indicate whether instr
125 has been generated with current lineno */
126 };
127
128 /* This struct captures the global state of a compilation.
129
130 The u pointer points to the current compilation unit, while units
131 for enclosing blocks are stored in c_stack. The u and c_stack are
132 managed by compiler_enter_scope() and compiler_exit_scope().
133 */
134
135 struct compiler {
136 const char *c_filename;
137 struct symtable *c_st;
138 PyFutureFeatures *c_future; /* pointer to module's __future__ */
139 PyCompilerFlags *c_flags;
140
141 int c_interactive; /* true if in interactive mode */
142 int c_nestlevel;
143
144 struct compiler_unit *u; /* compiler state for current block */
145 PyObject *c_stack; /* Python list holding compiler_unit ptrs */
146 PyArena *c_arena; /* pointer to memory allocation arena */
147 };
148
149 static int compiler_enter_scope(struct compiler *, identifier, void *, int);
150 static void compiler_free(struct compiler *);
151 static basicblock *compiler_new_block(struct compiler *);
152 static int compiler_next_instr(struct compiler *, basicblock *);
153 static int compiler_addop(struct compiler *, int);
154 static int compiler_addop_o(struct compiler *, int, PyObject *, PyObject *);
155 static int compiler_addop_i(struct compiler *, int, int);
156 static int compiler_addop_j(struct compiler *, int, basicblock *, int);
157 static basicblock *compiler_use_new_block(struct compiler *);
158 static int compiler_error(struct compiler *, const char *);
159 static int compiler_nameop(struct compiler *, identifier, expr_context_ty);
160
161 static PyCodeObject *compiler_mod(struct compiler *, mod_ty);
162 static int compiler_visit_stmt(struct compiler *, stmt_ty);
163 static int compiler_visit_keyword(struct compiler *, keyword_ty);
164 static int compiler_visit_expr(struct compiler *, expr_ty);
165 static int compiler_augassign(struct compiler *, stmt_ty);
166 static int compiler_visit_slice(struct compiler *, slice_ty,
167 expr_context_ty);
168
169 static int compiler_push_fblock(struct compiler *, enum fblocktype,
170 basicblock *);
171 static void compiler_pop_fblock(struct compiler *, enum fblocktype,
172 basicblock *);
173 /* Returns true if there is a loop on the fblock stack. */
174 static int compiler_in_loop(struct compiler *);
175
176 static int inplace_binop(struct compiler *, operator_ty);
177 static int expr_constant(expr_ty e);
178
179 static int compiler_with(struct compiler *, stmt_ty);
180
181 static PyCodeObject *assemble(struct compiler *, int addNone);
182 static PyObject *__doc__;
183
184 #define COMPILER_CAPSULE_NAME_COMPILER_UNIT "compile.c compiler unit"
185
186 PyObject *
187 _Py_Mangle(PyObject *privateobj, PyObject *ident)
188 {
189 /* Name mangling: __private becomes _classname__private.
190 This is independent from how the name is used. */
191 const char *p, *name = PyString_AsString(ident);
192 char *buffer;
193 size_t nlen, plen;
194 if (privateobj == NULL || !PyString_Check(privateobj) ||
195 name == NULL || name[0] != '_' || name[1] != '_') {
196 Py_INCREF(ident);
197 return ident;
198 }
199 p = PyString_AsString(privateobj);
200 nlen = strlen(name);
201 /* Don't mangle __id__ or names with dots.
202
203 The only time a name with a dot can occur is when
204 we are compiling an import statement that has a
205 package name.
206
207 TODO(jhylton): Decide whether we want to support
208 mangling of the module name, e.g. __M.X.
209 */
210 if ((name[nlen-1] == '_' && name[nlen-2] == '_')
211 || strchr(name, '.')) {
212 Py_INCREF(ident);
213 return ident; /* Don't mangle __whatever__ */
214 }
215 /* Strip leading underscores from class name */
216 while (*p == '_')
217 p++;
218 if (*p == '\0') {
219 Py_INCREF(ident);
220 return ident; /* Don't mangle if class is just underscores */
221 }
222 plen = strlen(p);
223
224 if (plen + nlen >= PY_SSIZE_T_MAX - 1) {
225 PyErr_SetString(PyExc_OverflowError,
226 "private identifier too large to be mangled");
227 return NULL;
228 }
229
230 ident = PyString_FromStringAndSize(NULL, 1 + nlen + plen);
231 if (!ident)
232 return 0;
233 /* ident = "_" + p[:plen] + name # i.e. 1+plen+nlen bytes */
234 buffer = PyString_AS_STRING(ident);
235 buffer[0] = '_';
236 strncpy(buffer+1, p, plen);
237 strcpy(buffer+1+plen, name);
238 return ident;
239 }
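/* Illustrative sketch (not part of the original source): conceptually, for a
 * class whose name is "Ham", _Py_Mangle rewrites private identifiers as
 * follows, matching the checks above:
 *
 *     __eggs    ->  _Ham__eggs    (mangled)
 *     __init__  ->  __init__      (trailing "__": left alone)
 *     __a.b     ->  __a.b         (contains a dot: left alone)
 *     a class named only of underscores disables mangling entirely
 */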
240
241 static int
242 compiler_init(struct compiler *c)
243 {
244 memset(c, 0, sizeof(struct compiler));
245
246 c->c_stack = PyList_New(0);
247 if (!c->c_stack)
248 return 0;
249
250 return 1;
251 }
252
253 PyCodeObject *
254 PyAST_Compile(mod_ty mod, const char *filename, PyCompilerFlags *flags,
255 PyArena *arena)
256 {
257 struct compiler c;
258 PyCodeObject *co = NULL;
259 PyCompilerFlags local_flags;
260 int merged;
261
262 if (!__doc__) {
263 __doc__ = PyString_InternFromString("__doc__");
264 if (!__doc__)
265 return NULL;
266 }
267
268 if (!compiler_init(&c))
269 return NULL;
270 c.c_filename = filename;
271 c.c_arena = arena;
272 c.c_future = PyFuture_FromAST(mod, filename);
273 if (c.c_future == NULL)
274 goto finally;
275 if (!flags) {
276 local_flags.cf_flags = 0;
277 flags = &local_flags;
278 }
279 merged = c.c_future->ff_features | flags->cf_flags;
280 c.c_future->ff_features = merged;
281 flags->cf_flags = merged;
282 c.c_flags = flags;
283 c.c_nestlevel = 0;
284
285 c.c_st = PySymtable_Build(mod, filename, c.c_future);
286 if (c.c_st == NULL) {
287 if (!PyErr_Occurred())
288 PyErr_SetString(PyExc_SystemError, "no symtable");
289 goto finally;
290 }
291
292 co = compiler_mod(&c, mod);
293
294 finally:
295 compiler_free(&c);
296 assert(co || PyErr_Occurred());
297 return co;
298 }
299
300 PyCodeObject *
301 PyNode_Compile(struct _node *n, const char *filename)
302 {
303 PyCodeObject *co = NULL;
304 mod_ty mod;
305 PyArena *arena = PyArena_New();
306 if (!arena)
307 return NULL;
308 mod = PyAST_FromNode(n, NULL, filename, arena);
309 if (mod)
310 co = PyAST_Compile(mod, filename, NULL, arena);
311 PyArena_Free(arena);
312 return co;
313 }
314
315 static void
316 compiler_free(struct compiler *c)
317 {
318 if (c->c_st)
319 PySymtable_Free(c->c_st);
320 if (c->c_future)
321 PyObject_Free(c->c_future);
322 Py_DECREF(c->c_stack);
323 }
324
325 static PyObject *
326 list2dict(PyObject *list)
327 {
328 Py_ssize_t i, n;
329 PyObject *v, *k;
330 PyObject *dict = PyDict_New();
331 if (!dict) return NULL;
332
333 n = PyList_Size(list);
334 for (i = 0; i < n; i++) {
335 v = PyInt_FromLong(i);
336 if (!v) {
337 Py_DECREF(dict);
338 return NULL;
339 }
340 k = PyList_GET_ITEM(list, i);
341 k = PyTuple_Pack(2, k, k->ob_type);
342 if (k == NULL || PyDict_SetItem(dict, k, v) < 0) {
343 Py_XDECREF(k);
344 Py_DECREF(v);
345 Py_DECREF(dict);
346 return NULL;
347 }
348 Py_DECREF(k);
349 Py_DECREF(v);
350 }
351 return dict;
352 }
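/* Illustrative sketch: list2dict turns the symbol table's varname list into
 * the index mapping later used as opargs for LOAD_FAST/STORE_FAST.  Keys are
 * (object, type) pairs, mirroring compiler_add_o below, so equal-but-
 * differently-typed keys cannot collide.  Conceptually:
 *
 *     ['x', 'y']  ->  { ('x', str): 0, ('y', str): 1 }
 */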
353
354 /* Return new dict containing names from src that match scope(s).
355
356 src is a symbol table dictionary. If the scope of a name matches
357 scope_type, or if its flag bit is set, insert it into the new dict. The
358 values are integers, starting at offset and increasing by one for
359 each key.
360 */
361
362 static PyObject *
363 dictbytype(PyObject *src, int scope_type, int flag, int offset)
364 {
365 Py_ssize_t i = offset, scope, num_keys, key_i;
366 PyObject *k, *v, *dest = PyDict_New();
367 PyObject *sorted_keys;
368
369 assert(offset >= 0);
370 if (dest == NULL)
371 return NULL;
372
373 /* Sort the keys so that we have a deterministic order on the indexes
374 saved in the returned dictionary. These indexes are used as indexes
375 into the free and cell var storage. Therefore if they aren't
376 deterministic, then the generated bytecode is not deterministic.
377 */
378 sorted_keys = PyDict_Keys(src);
379 if (sorted_keys == NULL)
380 return NULL;
381 if (PyList_Sort(sorted_keys) != 0) {
382 Py_DECREF(sorted_keys);
383 return NULL;
384 }
385 num_keys = PyList_GET_SIZE(sorted_keys);
386
387 for (key_i = 0; key_i < num_keys; key_i++) {
388 k = PyList_GET_ITEM(sorted_keys, key_i);
389 v = PyDict_GetItem(src, k);
390 /* XXX this should probably be a macro in symtable.h */
391 assert(PyInt_Check(v));
392 scope = (PyInt_AS_LONG(v) >> SCOPE_OFF) & SCOPE_MASK;
393
394 if (scope == scope_type || PyInt_AS_LONG(v) & flag) {
395 PyObject *tuple, *item = PyInt_FromLong(i);
396 if (item == NULL) {
397 Py_DECREF(sorted_keys);
398 Py_DECREF(dest);
399 return NULL;
400 }
401 i++;
402 tuple = PyTuple_Pack(2, k, k->ob_type);
403 if (!tuple || PyDict_SetItem(dest, tuple, item) < 0) {
404 Py_DECREF(sorted_keys);
405 Py_DECREF(item);
406 Py_DECREF(dest);
407 Py_XDECREF(tuple);
408 return NULL;
409 }
410 Py_DECREF(item);
411 Py_DECREF(tuple);
412 }
413 }
414 Py_DECREF(sorted_keys);
415 return dest;
416 }
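/* Illustrative sketch of how compiler_enter_scope (below) uses dictbytype:
 * cell variables are numbered first, then free variables continue at offset
 * PyDict_Size(u_cellvars), so LOAD_CLOSURE/LOAD_DEREF opargs index one
 * combined storage area.  For example, with cells a, b and free variable c
 * (keys are really (name, type) pairs, as above):
 *
 *     u_cellvars = { a: 0, b: 1 }      u_freevars = { c: 2 }
 */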
417
418 static void
419 compiler_unit_check(struct compiler_unit *u)
420 {
421 basicblock *block;
422 for (block = u->u_blocks; block != NULL; block = block->b_list) {
423 assert((void *)block != (void *)0xcbcbcbcb);
424 assert((void *)block != (void *)0xfbfbfbfb);
425 assert((void *)block != (void *)0xdbdbdbdb);
426 if (block->b_instr != NULL) {
427 assert(block->b_ialloc > 0);
428 assert(block->b_iused > 0);
429 assert(block->b_ialloc >= block->b_iused);
430 }
431 else {
432 assert (block->b_iused == 0);
433 assert (block->b_ialloc == 0);
434 }
435 }
436 }
437
438 static void
439 compiler_unit_free(struct compiler_unit *u)
440 {
441 basicblock *b, *next;
442
443 compiler_unit_check(u);
444 b = u->u_blocks;
445 while (b != NULL) {
446 if (b->b_instr)
447 PyObject_Free((void *)b->b_instr);
448 next = b->b_list;
449 PyObject_Free((void *)b);
450 b = next;
451 }
452 Py_CLEAR(u->u_ste);
453 Py_CLEAR(u->u_name);
454 Py_CLEAR(u->u_consts);
455 Py_CLEAR(u->u_names);
456 Py_CLEAR(u->u_varnames);
457 Py_CLEAR(u->u_freevars);
458 Py_CLEAR(u->u_cellvars);
459 Py_CLEAR(u->u_private);
460 PyObject_Free(u);
461 }
462
463 static int
464 compiler_enter_scope(struct compiler *c, identifier name, void *key,
465 int lineno)
466 {
467 struct compiler_unit *u;
468
469 u = (struct compiler_unit *)PyObject_Malloc(sizeof(
470 struct compiler_unit));
471 if (!u) {
472 PyErr_NoMemory();
473 return 0;
474 }
475 memset(u, 0, sizeof(struct compiler_unit));
476 u->u_argcount = 0;
477 u->u_ste = PySymtable_Lookup(c->c_st, key);
478 if (!u->u_ste) {
479 compiler_unit_free(u);
480 return 0;
481 }
482 Py_INCREF(name);
483 u->u_name = name;
484 u->u_varnames = list2dict(u->u_ste->ste_varnames);
485 u->u_cellvars = dictbytype(u->u_ste->ste_symbols, CELL, 0, 0);
486 if (!u->u_varnames || !u->u_cellvars) {
487 compiler_unit_free(u);
488 return 0;
489 }
490
491 u->u_freevars = dictbytype(u->u_ste->ste_symbols, FREE, DEF_FREE_CLASS,
492 PyDict_Size(u->u_cellvars));
493 if (!u->u_freevars) {
494 compiler_unit_free(u);
495 return 0;
496 }
497
498 u->u_blocks = NULL;
499 u->u_nfblocks = 0;
500 u->u_firstlineno = lineno;
501 u->u_lineno = 0;
502 u->u_lineno_set = false;
503 u->u_consts = PyDict_New();
504 if (!u->u_consts) {
505 compiler_unit_free(u);
506 return 0;
507 }
508 u->u_names = PyDict_New();
509 if (!u->u_names) {
510 compiler_unit_free(u);
511 return 0;
512 }
513
514 u->u_private = NULL;
515
516 /* Push the old compiler_unit on the stack. */
517 if (c->u) {
518 PyObject *capsule = PyCapsule_New(c->u, COMPILER_CAPSULE_NAME_COMPILER_UNIT, NULL);
519 if (!capsule || PyList_Append(c->c_stack, capsule) < 0) {
520 Py_XDECREF(capsule);
521 compiler_unit_free(u);
522 return 0;
523 }
524 Py_DECREF(capsule);
525 u->u_private = c->u->u_private;
526 Py_XINCREF(u->u_private);
527 }
528 c->u = u;
529
530 c->c_nestlevel++;
531 if (compiler_use_new_block(c) == NULL)
532 return 0;
533
534 return 1;
535 }
536
537 static void
538 compiler_exit_scope(struct compiler *c)
539 {
540 int n;
541 PyObject *capsule;
542
543 c->c_nestlevel--;
544 compiler_unit_free(c->u);
545 /* Restore c->u to the parent unit. */
546 n = PyList_GET_SIZE(c->c_stack) - 1;
547 if (n >= 0) {
548 capsule = PyList_GET_ITEM(c->c_stack, n);
549 c->u = (struct compiler_unit *)PyCapsule_GetPointer(capsule, COMPILER_CAPSULE_NAME_COMPILER_UNIT);
550 assert(c->u);
551 /* we are deleting from a list so this really shouldn't fail */
552 if (PySequence_DelItem(c->c_stack, n) < 0)
553 Py_FatalError("compiler_exit_scope()");
554 compiler_unit_check(c->u);
555 }
556 else
557 c->u = NULL;
558
559 }
560
561 /* Allocate a new block and return a pointer to it.
562 Returns NULL on error.
563 */
564
565 static basicblock *
566 compiler_new_block(struct compiler *c)
567 {
568 basicblock *b;
569 struct compiler_unit *u;
570
571 u = c->u;
572 b = (basicblock *)PyObject_Malloc(sizeof(basicblock));
573 if (b == NULL) {
574 PyErr_NoMemory();
575 return NULL;
576 }
577 memset((void *)b, 0, sizeof(basicblock));
578 /* Extend the singly linked list of blocks with new block. */
579 b->b_list = u->u_blocks;
580 u->u_blocks = b;
581 return b;
582 }
583
584 static basicblock *
585 compiler_use_new_block(struct compiler *c)
586 {
587 basicblock *block = compiler_new_block(c);
588 if (block == NULL)
589 return NULL;
590 c->u->u_curblock = block;
591 return block;
592 }
593
594 static basicblock *
595 compiler_next_block(struct compiler *c)
596 {
597 basicblock *block = compiler_new_block(c);
598 if (block == NULL)
599 return NULL;
600 c->u->u_curblock->b_next = block;
601 c->u->u_curblock = block;
602 return block;
603 }
604
605 static basicblock *
606 compiler_use_next_block(struct compiler *c, basicblock *block)
607 {
608 assert(block != NULL);
609 c->u->u_curblock->b_next = block;
610 c->u->u_curblock = block;
611 return block;
612 }
613
614 /* Returns the offset of the next instruction in the current block's
615 b_instr array. Resizes the b_instr as necessary.
616 Returns -1 on failure.
617 */
618
619 static int
620 compiler_next_instr(struct compiler *c, basicblock *b)
621 {
622 assert(b != NULL);
623 if (b->b_instr == NULL) {
624 b->b_instr = (struct instr *)PyObject_Malloc(
625 sizeof(struct instr) * DEFAULT_BLOCK_SIZE);
626 if (b->b_instr == NULL) {
627 PyErr_NoMemory();
628 return -1;
629 }
630 b->b_ialloc = DEFAULT_BLOCK_SIZE;
631 memset((char *)b->b_instr, 0,
632 sizeof(struct instr) * DEFAULT_BLOCK_SIZE);
633 }
634 else if (b->b_iused == b->b_ialloc) {
635 struct instr *tmp;
636 size_t oldsize, newsize;
637 oldsize = b->b_ialloc * sizeof(struct instr);
638 newsize = oldsize << 1;
639
640 if (oldsize > (PY_SIZE_MAX >> 1)) {
641 PyErr_NoMemory();
642 return -1;
643 }
644
645 if (newsize == 0) {
646 PyErr_NoMemory();
647 return -1;
648 }
649 b->b_ialloc <<= 1;
650 tmp = (struct instr *)PyObject_Realloc(
651 (void *)b->b_instr, newsize);
652 if (tmp == NULL) {
653 PyErr_NoMemory();
654 return -1;
655 }
656 b->b_instr = tmp;
657 memset((char *)b->b_instr + oldsize, 0, newsize - oldsize);
658 }
659 return b->b_iused++;
660 }
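/* Illustrative note: a block's instruction array starts at
 * DEFAULT_BLOCK_SIZE (16) entries and doubles each time it fills up
 * (16 -> 32 -> 64 ...); the value returned is the offset of the newly
 * reserved slot, i.e. b_iused before the increment. */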
661
662 /* Set the i_lineno member of the instruction at offset off if the
663 line number for the current expression/statement has not
664 already been set. If it has been set, the call has no effect.
665
666 The line number is reset in the following cases:
667 - when entering a new scope
668 - on each statement
669 - on each expression that starts a new line
670 - before the "except" clause
671 - before the "for" and "while" expressions
672 */
673
674 static void
675 compiler_set_lineno(struct compiler *c, int off)
676 {
677 basicblock *b;
678 if (c->u->u_lineno_set)
679 return;
680 c->u->u_lineno_set = true;
681 b = c->u->u_curblock;
682 b->b_instr[off].i_lineno = c->u->u_lineno;
683 }
684
685 static int
686 opcode_stack_effect(int opcode, int oparg)
687 {
688 switch (opcode) {
689 case POP_TOP:
690 return -1;
691 case ROT_TWO:
692 case ROT_THREE:
693 return 0;
694 case DUP_TOP:
695 return 1;
696 case ROT_FOUR:
697 return 0;
698
699 case UNARY_POSITIVE:
700 case UNARY_NEGATIVE:
701 case UNARY_NOT:
702 case UNARY_CONVERT:
703 case UNARY_INVERT:
704 return 0;
705
706 case SET_ADD:
707 case LIST_APPEND:
708 return -1;
709
710 case MAP_ADD:
711 return -2;
712
713 case BINARY_POWER:
714 case BINARY_MULTIPLY:
715 case BINARY_DIVIDE:
716 case BINARY_MODULO:
717 case BINARY_ADD:
718 case BINARY_SUBTRACT:
719 case BINARY_SUBSCR:
720 case BINARY_FLOOR_DIVIDE:
721 case BINARY_TRUE_DIVIDE:
722 return -1;
723 case INPLACE_FLOOR_DIVIDE:
724 case INPLACE_TRUE_DIVIDE:
725 return -1;
726
727 case SLICE+0:
728 return 0;
729 case SLICE+1:
730 return -1;
731 case SLICE+2:
732 return -1;
733 case SLICE+3:
734 return -2;
735
736 case STORE_SLICE+0:
737 return -2;
738 case STORE_SLICE+1:
739 return -3;
740 case STORE_SLICE+2:
741 return -3;
742 case STORE_SLICE+3:
743 return -4;
744
745 case DELETE_SLICE+0:
746 return -1;
747 case DELETE_SLICE+1:
748 return -2;
749 case DELETE_SLICE+2:
750 return -2;
751 case DELETE_SLICE+3:
752 return -3;
753
754 case INPLACE_ADD:
755 case INPLACE_SUBTRACT:
756 case INPLACE_MULTIPLY:
757 case INPLACE_DIVIDE:
758 case INPLACE_MODULO:
759 return -1;
760 case STORE_SUBSCR:
761 return -3;
762 case STORE_MAP:
763 return -2;
764 case DELETE_SUBSCR:
765 return -2;
766
767 case BINARY_LSHIFT:
768 case BINARY_RSHIFT:
769 case BINARY_AND:
770 case BINARY_XOR:
771 case BINARY_OR:
772 return -1;
773 case INPLACE_POWER:
774 return -1;
775 case GET_ITER:
776 return 0;
777
778 case PRINT_EXPR:
779 return -1;
780 case PRINT_ITEM:
781 return -1;
782 case PRINT_NEWLINE:
783 return 0;
784 case PRINT_ITEM_TO:
785 return -2;
786 case PRINT_NEWLINE_TO:
787 return -1;
788 case INPLACE_LSHIFT:
789 case INPLACE_RSHIFT:
790 case INPLACE_AND:
791 case INPLACE_XOR:
792 case INPLACE_OR:
793 return -1;
794 case BREAK_LOOP:
795 return 0;
796 case SETUP_WITH:
797 return 4;
798 case WITH_CLEANUP:
799 return -1; /* XXX Sometimes more */
800 case LOAD_LOCALS:
801 return 1;
802 case RETURN_VALUE:
803 return -1;
804 case IMPORT_STAR:
805 return -1;
806 case EXEC_STMT:
807 return -3;
808 case YIELD_VALUE:
809 return 0;
810
811 case POP_BLOCK:
812 return 0;
813 case END_FINALLY:
814 return -3; /* or -1 or -2 if no exception occurred or
815 return/break/continue */
816 case BUILD_CLASS:
817 return -2;
818
819 case STORE_NAME:
820 return -1;
821 case DELETE_NAME:
822 return 0;
823 case UNPACK_SEQUENCE:
824 return oparg-1;
825 case FOR_ITER:
826 return 1; /* or -1, at end of iterator */
827
828 case STORE_ATTR:
829 return -2;
830 case DELETE_ATTR:
831 return -1;
832 case STORE_GLOBAL:
833 return -1;
834 case DELETE_GLOBAL:
835 return 0;
836 case DUP_TOPX:
837 return oparg;
838 case LOAD_CONST:
839 return 1;
840 case LOAD_NAME:
841 return 1;
842 case BUILD_TUPLE:
843 case BUILD_LIST:
844 case BUILD_SET:
845 return 1-oparg;
846 case BUILD_MAP:
847 return 1;
848 case LOAD_ATTR:
849 return 0;
850 case COMPARE_OP:
851 return -1;
852 case IMPORT_NAME:
853 return -1;
854 case IMPORT_FROM:
855 return 1;
856
857 case JUMP_FORWARD:
858 case JUMP_IF_TRUE_OR_POP: /* -1 if jump not taken */
859 case JUMP_IF_FALSE_OR_POP: /* "" */
860 case JUMP_ABSOLUTE:
861 return 0;
862
863 case POP_JUMP_IF_FALSE:
864 case POP_JUMP_IF_TRUE:
865 return -1;
866
867 case LOAD_GLOBAL:
868 return 1;
869
870 case CONTINUE_LOOP:
871 return 0;
872 case SETUP_LOOP:
873 case SETUP_EXCEPT:
874 case SETUP_FINALLY:
875 return 0;
876
877 case LOAD_FAST:
878 return 1;
879 case STORE_FAST:
880 return -1;
881 case DELETE_FAST:
882 return 0;
883
884 case RAISE_VARARGS:
885 return -oparg;
886 #define NARGS(o) (((o) % 256) + 2*((o) / 256))
887 case CALL_FUNCTION:
888 return -NARGS(oparg);
889 case CALL_FUNCTION_VAR:
890 case CALL_FUNCTION_KW:
891 return -NARGS(oparg)-1;
892 case CALL_FUNCTION_VAR_KW:
893 return -NARGS(oparg)-2;
894 #undef NARGS
895 case MAKE_FUNCTION:
896 return -oparg;
897 case BUILD_SLICE:
898 if (oparg == 3)
899 return -2;
900 else
901 return -1;
902
903 case MAKE_CLOSURE:
904 return -oparg-1;
905 case LOAD_CLOSURE:
906 return 1;
907 case LOAD_DEREF:
908 return 1;
909 case STORE_DEREF:
910 return -1;
911 default:
912 fprintf(stderr, "opcode = %d\n", opcode);
913 Py_FatalError("opcode_stack_effect()");
914
915 }
916 return 0; /* not reachable */
917 }
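/* Illustrative sketch of the NARGS encoding used above: for CALL_FUNCTION the
 * low byte of oparg is the number of positional arguments and the high byte
 * the number of keyword arguments, each keyword occupying two stack slots
 * (name and value).  For example, f(a, b, k=1):
 *
 *     oparg = 0x0102 (258), NARGS(oparg) = 2 + 2*1 = 4
 *     stack effect = -4  (callable and 4 slots popped, result pushed)
 */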
918
919 /* Add an opcode with no argument.
920 Returns 0 on failure, 1 on success.
921 */
922
923 static int
924 compiler_addop(struct compiler *c, int opcode)
925 {
926 basicblock *b;
927 struct instr *i;
928 int off;
929 off = compiler_next_instr(c, c->u->u_curblock);
930 if (off < 0)
931 return 0;
932 b = c->u->u_curblock;
933 i = &b->b_instr[off];
934 i->i_opcode = opcode;
935 i->i_hasarg = 0;
936 if (opcode == RETURN_VALUE)
937 b->b_return = 1;
938 compiler_set_lineno(c, off);
939 return 1;
940 }
941
942 static int
943 compiler_add_o(struct compiler *c, PyObject *dict, PyObject *o)
944 {
945 PyObject *t, *v;
946 Py_ssize_t arg;
947 double d;
948
949 /* necessary to make sure types aren't coerced (e.g., int and long) */
950 /* _and_ to distinguish 0.0 from -0.0 e.g. on IEEE platforms */
951 if (PyFloat_Check(o)) {
952 d = PyFloat_AS_DOUBLE(o);
953 /* all we need is to make the tuple different in either the 0.0
954 * or -0.0 case from all others, just to avoid the "coercion".
955 */
956 if (d == 0.0 && copysign(1.0, d) < 0.0)
957 t = PyTuple_Pack(3, o, o->ob_type, Py_None);
958 else
959 t = PyTuple_Pack(2, o, o->ob_type);
960 }
961 #ifndef WITHOUT_COMPLEX
962 else if (PyComplex_Check(o)) {
963 Py_complex z;
964 int real_negzero, imag_negzero;
965 /* For the complex case we must make complex(x, 0.)
966 different from complex(x, -0.) and complex(0., y)
967 different from complex(-0., y), for any x and y.
968 All four complex zeros must be distinguished.*/
969 z = PyComplex_AsCComplex(o);
970 real_negzero = z.real == 0.0 && copysign(1.0, z.real) < 0.0;
971 imag_negzero = z.imag == 0.0 && copysign(1.0, z.imag) < 0.0;
972 if (real_negzero && imag_negzero) {
973 t = PyTuple_Pack(5, o, o->ob_type,
974 Py_None, Py_None, Py_None);
975 }
976 else if (imag_negzero) {
977 t = PyTuple_Pack(4, o, o->ob_type, Py_None, Py_None);
978 }
979 else if (real_negzero) {
980 t = PyTuple_Pack(3, o, o->ob_type, Py_None);
981 }
982 else {
983 t = PyTuple_Pack(2, o, o->ob_type);
984 }
985 }
986 #endif /* WITHOUT_COMPLEX */
987 else {
988 t = PyTuple_Pack(2, o, o->ob_type);
989 }
990 if (t == NULL)
991 return -1;
992
993 v = PyDict_GetItem(dict, t);
994 if (!v) {
995 arg = PyDict_Size(dict);
996 v = PyInt_FromLong(arg);
997 if (!v) {
998 Py_DECREF(t);
999 return -1;
1000 }
1001 if (PyDict_SetItem(dict, t, v) < 0) {
1002 Py_DECREF(t);
1003 Py_DECREF(v);
1004 return -1;
1005 }
1006 Py_DECREF(v);
1007 }
1008 else
1009 arg = PyInt_AsLong(v);
1010 Py_DECREF(t);
1011 return arg;
1012 }
1013
1014 static int
1015 compiler_addop_o(struct compiler *c, int opcode, PyObject *dict,
1016 PyObject *o)
1017 {
1018 int arg = compiler_add_o(c, dict, o);
1019 if (arg < 0)
1020 return 0;
1021 return compiler_addop_i(c, opcode, arg);
1022 }
1023
1024 static int
1025 compiler_addop_name(struct compiler *c, int opcode, PyObject *dict,
1026 PyObject *o)
1027 {
1028 int arg;
1029 PyObject *mangled = _Py_Mangle(c->u->u_private, o);
1030 if (!mangled)
1031 return 0;
1032 arg = compiler_add_o(c, dict, mangled);
1033 Py_DECREF(mangled);
1034 if (arg < 0)
1035 return 0;
1036 return compiler_addop_i(c, opcode, arg);
1037 }
1038
1039 /* Add an opcode with an integer argument.
1040 Returns 0 on failure, 1 on success.
1041 */
1042
1043 static int
1044 compiler_addop_i(struct compiler *c, int opcode, int oparg)
1045 {
1046 struct instr *i;
1047 int off;
1048 off = compiler_next_instr(c, c->u->u_curblock);
1049 if (off < 0)
1050 return 0;
1051 i = &c->u->u_curblock->b_instr[off];
1052 i->i_opcode = opcode;
1053 i->i_oparg = oparg;
1054 i->i_hasarg = 1;
1055 compiler_set_lineno(c, off);
1056 return 1;
1057 }
1058
1059 static int
1060 compiler_addop_j(struct compiler *c, int opcode, basicblock *b, int absolute)
1061 {
1062 struct instr *i;
1063 int off;
1064
1065 assert(b != NULL);
1066 off = compiler_next_instr(c, c->u->u_curblock);
1067 if (off < 0)
1068 return 0;
1069 i = &c->u->u_curblock->b_instr[off];
1070 i->i_opcode = opcode;
1071 i->i_target = b;
1072 i->i_hasarg = 1;
1073 if (absolute)
1074 i->i_jabs = 1;
1075 else
1076 i->i_jrel = 1;
1077 compiler_set_lineno(c, off);
1078 return 1;
1079 }
1080
1081 /* The distinction between NEW_BLOCK and NEXT_BLOCK is subtle. (I'd
1082 like to find better names.) NEW_BLOCK() creates a new block and sets
1083 it as the current block. NEXT_BLOCK() also creates an implicit jump
1084 from the current block to the new block.
1085 */
1086
1087 /* The returns inside these macros make it impossible to decref objects
1088 created in the local function. Local objects should use the arena.
1089 */
1090
1091
1092 #define NEW_BLOCK(C) { \
1093 if (compiler_use_new_block((C)) == NULL) \
1094 return 0; \
1095 }
1096
1097 #define NEXT_BLOCK(C) { \
1098 if (compiler_next_block((C)) == NULL) \
1099 return 0; \
1100 }
1101
1102 #define ADDOP(C, OP) { \
1103 if (!compiler_addop((C), (OP))) \
1104 return 0; \
1105 }
1106
1107 #define ADDOP_IN_SCOPE(C, OP) { \
1108 if (!compiler_addop((C), (OP))) { \
1109 compiler_exit_scope(c); \
1110 return 0; \
1111 } \
1112 }
1113
1114 #define ADDOP_O(C, OP, O, TYPE) { \
1115 if (!compiler_addop_o((C), (OP), (C)->u->u_ ## TYPE, (O))) \
1116 return 0; \
1117 }
1118
1119 #define ADDOP_NAME(C, OP, O, TYPE) { \
1120 if (!compiler_addop_name((C), (OP), (C)->u->u_ ## TYPE, (O))) \
1121 return 0; \
1122 }
1123
1124 #define ADDOP_I(C, OP, O) { \
1125 if (!compiler_addop_i((C), (OP), (O))) \
1126 return 0; \
1127 }
1128
1129 #define ADDOP_JABS(C, OP, O) { \
1130 if (!compiler_addop_j((C), (OP), (O), 1)) \
1131 return 0; \
1132 }
1133
1134 #define ADDOP_JREL(C, OP, O) { \
1135 if (!compiler_addop_j((C), (OP), (O), 0)) \
1136 return 0; \
1137 }
1138
1139 /* VISIT and VISIT_SEQ take an ASDL type as their second argument. They use
1140 the ASDL name to synthesize the name of the C type and the visit function.
1141 */
1142
1143 #define VISIT(C, TYPE, V) {\
1144 if (!compiler_visit_ ## TYPE((C), (V))) \
1145 return 0; \
1146 }
1147
1148 #define VISIT_IN_SCOPE(C, TYPE, V) {\
1149 if (!compiler_visit_ ## TYPE((C), (V))) { \
1150 compiler_exit_scope(c); \
1151 return 0; \
1152 } \
1153 }
1154
1155 #define VISIT_SLICE(C, V, CTX) {\
1156 if (!compiler_visit_slice((C), (V), (CTX))) \
1157 return 0; \
1158 }
1159
1160 #define VISIT_SEQ(C, TYPE, SEQ) { \
1161 int _i; \
1162 asdl_seq *seq = (SEQ); /* avoid variable capture */ \
1163 for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \
1164 TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \
1165 if (!compiler_visit_ ## TYPE((C), elt)) \
1166 return 0; \
1167 } \
1168 }
1169
1170 #define VISIT_SEQ_IN_SCOPE(C, TYPE, SEQ) { \
1171 int _i; \
1172 asdl_seq *seq = (SEQ); /* avoid variable capture */ \
1173 for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \
1174 TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \
1175 if (!compiler_visit_ ## TYPE((C), elt)) { \
1176 compiler_exit_scope(c); \
1177 return 0; \
1178 } \
1179 } \
1180 }
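/* Illustrative expansion (not in the original source): VISIT(c, expr, e)
 * becomes roughly
 *
 *     { if (!compiler_visit_expr((c), (e))) return 0; }
 *
 * i.e. failure returns 0 from the *enclosing* function, which is why the
 * CAUTION at the top of this file warns against holding unreleased memory
 * across these macros. */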
1181
1182 static int
1183 compiler_isdocstring(stmt_ty s)
1184 {
1185 if (s->kind != Expr_kind)
1186 return 0;
1187 return s->v.Expr.value->kind == Str_kind;
1188 }
1189
1190 /* Compile a sequence of statements, checking for a docstring. */
1191
1192 static int
1193 compiler_body(struct compiler *c, asdl_seq *stmts)
1194 {
1195 int i = 0;
1196 stmt_ty st;
1197
1198 if (!asdl_seq_LEN(stmts))
1199 return 1;
1200 st = (stmt_ty)asdl_seq_GET(stmts, 0);
1201 if (compiler_isdocstring(st) && Py_OptimizeFlag < 2) {
1202 /* don't generate docstrings if -OO */
1203 i = 1;
1204 VISIT(c, expr, st->v.Expr.value);
1205 if (!compiler_nameop(c, __doc__, Store))
1206 return 0;
1207 }
1208 for (; i < asdl_seq_LEN(stmts); i++)
1209 VISIT(c, stmt, (stmt_ty)asdl_seq_GET(stmts, i));
1210 return 1;
1211 }
1212
1213 static PyCodeObject *
1214 compiler_mod(struct compiler *c, mod_ty mod)
1215 {
1216 PyCodeObject *co;
1217 int addNone = 1;
1218 static PyObject *module;
1219 if (!module) {
1220 module = PyString_InternFromString("<module>");
1221 if (!module)
1222 return NULL;
1223 }
1224 /* Use 0 for firstlineno initially; it will be fixed up in assemble(). */
1225 if (!compiler_enter_scope(c, module, mod, 0))
1226 return NULL;
1227 switch (mod->kind) {
1228 case Module_kind:
1229 if (!compiler_body(c, mod->v.Module.body)) {
1230 compiler_exit_scope(c);
1231 return 0;
1232 }
1233 break;
1234 case Interactive_kind:
1235 c->c_interactive = 1;
1236 VISIT_SEQ_IN_SCOPE(c, stmt,
1237 mod->v.Interactive.body);
1238 break;
1239 case Expression_kind:
1240 VISIT_IN_SCOPE(c, expr, mod->v.Expression.body);
1241 addNone = 0;
1242 break;
1243 case Suite_kind:
1244 PyErr_SetString(PyExc_SystemError,
1245 "suite should not be possible");
1246 return 0;
1247 default:
1248 PyErr_Format(PyExc_SystemError,
1249 "module kind %d should not be possible",
1250 mod->kind);
1251 return 0;
1252 }
1253 co = assemble(c, addNone);
1254 compiler_exit_scope(c);
1255 return co;
1256 }
1257
1258 /* The test for LOCAL must come before the test for FREE in order to
1259 handle classes where a name is both local and free. The local var is
1260 a method and the free var is a free var referenced within a method.
1261 */
1262
1263 static int
1264 get_ref_type(struct compiler *c, PyObject *name)
1265 {
1266 int scope = PyST_GetScope(c->u->u_ste, name);
1267 if (scope == 0) {
1268 char buf[350];
1269 PyOS_snprintf(buf, sizeof(buf),
1270 "unknown scope for %.100s in %.100s(%s) in %s\n"
1271 "symbols: %s\nlocals: %s\nglobals: %s",
1272 PyString_AS_STRING(name),
1273 PyString_AS_STRING(c->u->u_name),
1274 PyString_AS_STRING(PyObject_Repr(c->u->u_ste->ste_id)),
1275 c->c_filename,
1276 PyString_AS_STRING(PyObject_Repr(c->u->u_ste->ste_symbols)),
1277 PyString_AS_STRING(PyObject_Repr(c->u->u_varnames)),
1278 PyString_AS_STRING(PyObject_Repr(c->u->u_names))
1279 );
1280 Py_FatalError(buf);
1281 }
1282
1283 return scope;
1284 }
1285
1286 static int
1287 compiler_lookup_arg(PyObject *dict, PyObject *name)
1288 {
1289 PyObject *k, *v;
1290 k = PyTuple_Pack(2, name, name->ob_type);
1291 if (k == NULL)
1292 return -1;
1293 v = PyDict_GetItem(dict, k);
1294 Py_DECREF(k);
1295 if (v == NULL)
1296 return -1;
1297 return PyInt_AS_LONG(v);
1298 }
1299
1300 static int
1301 compiler_make_closure(struct compiler *c, PyCodeObject *co, int args)
1302 {
1303 int i, free = PyCode_GetNumFree(co);
1304 if (free == 0) {
1305 ADDOP_O(c, LOAD_CONST, (PyObject*)co, consts);
1306 ADDOP_I(c, MAKE_FUNCTION, args);
1307 return 1;
1308 }
1309 for (i = 0; i < free; ++i) {
1310 /* Bypass com_addop_varname because it will generate
1311 LOAD_DEREF but LOAD_CLOSURE is needed.
1312 */
1313 PyObject *name = PyTuple_GET_ITEM(co->co_freevars, i);
1314 int arg, reftype;
1315
1316 /* Special case: If a class contains a method with a
1317 free variable that has the same name as a method,
1318 the name will be considered free *and* local in the
1319 class. It should be handled by the closure, as
1320 well as by the normal name lookup logic.
1321 */
1322 reftype = get_ref_type(c, name);
1323 if (reftype == CELL)
1324 arg = compiler_lookup_arg(c->u->u_cellvars, name);
1325 else /* (reftype == FREE) */
1326 arg = compiler_lookup_arg(c->u->u_freevars, name);
1327 if (arg == -1) {
1328 printf("lookup %s in %s %d %d\n"
1329 "freevars of %s: %s\n",
1330 PyString_AS_STRING(PyObject_Repr(name)),
1331 PyString_AS_STRING(c->u->u_name),
1332 reftype, arg,
1333 PyString_AS_STRING(co->co_name),
1334 PyString_AS_STRING(PyObject_Repr(co->co_freevars)));
1335 Py_FatalError("compiler_make_closure()");
1336 }
1337 ADDOP_I(c, LOAD_CLOSURE, arg);
1338 }
1339 ADDOP_I(c, BUILD_TUPLE, free);
1340 ADDOP_O(c, LOAD_CONST, (PyObject*)co, consts);
1341 ADDOP_I(c, MAKE_CLOSURE, args);
1342 return 1;
1343 }
1344
1345 static int
1346 compiler_decorators(struct compiler *c, asdl_seq* decos)
1347 {
1348 int i;
1349
1350 if (!decos)
1351 return 1;
1352
1353 for (i = 0; i < asdl_seq_LEN(decos); i++) {
1354 VISIT(c, expr, (expr_ty)asdl_seq_GET(decos, i));
1355 }
1356 return 1;
1357 }
1358
1359 static int
1360 compiler_arguments(struct compiler *c, arguments_ty args)
1361 {
1362 int i;
1363 int n = asdl_seq_LEN(args->args);
1364 /* Correctly handle nested argument lists */
1365 for (i = 0; i < n; i++) {
1366 expr_ty arg = (expr_ty)asdl_seq_GET(args->args, i);
1367 if (arg->kind == Tuple_kind) {
1368 PyObject *id = PyString_FromFormat(".%d", i);
1369 if (id == NULL) {
1370 return 0;
1371 }
1372 if (!compiler_nameop(c, id, Load)) {
1373 Py_DECREF(id);
1374 return 0;
1375 }
1376 Py_DECREF(id);
1377 VISIT(c, expr, arg);
1378 }
1379 }
1380 return 1;
1381 }
1382
1383 static int
1384 compiler_function(struct compiler *c, stmt_ty s)
1385 {
1386 PyCodeObject *co;
1387 PyObject *first_const = Py_None;
1388 arguments_ty args = s->v.FunctionDef.args;
1389 asdl_seq* decos = s->v.FunctionDef.decorator_list;
1390 stmt_ty st;
1391 int i, n, docstring;
1392
1393 assert(s->kind == FunctionDef_kind);
1394
1395 if (!compiler_decorators(c, decos))
1396 return 0;
1397 if (args->defaults)
1398 VISIT_SEQ(c, expr, args->defaults);
1399 if (!compiler_enter_scope(c, s->v.FunctionDef.name, (void *)s,
1400 s->lineno))
1401 return 0;
1402
1403 st = (stmt_ty)asdl_seq_GET(s->v.FunctionDef.body, 0);
1404 docstring = compiler_isdocstring(st);
1405 if (docstring && Py_OptimizeFlag < 2)
1406 first_const = st->v.Expr.value->v.Str.s;
1407 if (compiler_add_o(c, c->u->u_consts, first_const) < 0) {
1408 compiler_exit_scope(c);
1409 return 0;
1410 }
1411
1412 /* unpack nested arguments */
1413 compiler_arguments(c, args);
1414
1415 c->u->u_argcount = asdl_seq_LEN(args->args);
1416 n = asdl_seq_LEN(s->v.FunctionDef.body);
1417 /* if there was a docstring, we need to skip the first statement */
1418 for (i = docstring; i < n; i++) {
1419 st = (stmt_ty)asdl_seq_GET(s->v.FunctionDef.body, i);
1420 VISIT_IN_SCOPE(c, stmt, st);
1421 }
1422 co = assemble(c, 1);
1423 compiler_exit_scope(c);
1424 if (co == NULL)
1425 return 0;
1426
1427 compiler_make_closure(c, co, asdl_seq_LEN(args->defaults));
1428 Py_DECREF(co);
1429
1430 for (i = 0; i < asdl_seq_LEN(decos); i++) {
1431 ADDOP_I(c, CALL_FUNCTION, 1);
1432 }
1433
1434 return compiler_nameop(c, s->v.FunctionDef.name, Store);
1435 }
1436
1437 static int
1438 compiler_class(struct compiler *c, stmt_ty s)
1439 {
1440 int n, i;
1441 PyCodeObject *co;
1442 PyObject *str;
1443 asdl_seq* decos = s->v.ClassDef.decorator_list;
1444
1445 if (!compiler_decorators(c, decos))
1446 return 0;
1447
1448 /* push class name on stack, needed by BUILD_CLASS */
1449 ADDOP_O(c, LOAD_CONST, s->v.ClassDef.name, consts);
1450 /* push the tuple of base classes on the stack */
1451 n = asdl_seq_LEN(s->v.ClassDef.bases);
1452 if (n > 0)
1453 VISIT_SEQ(c, expr, s->v.ClassDef.bases);
1454 ADDOP_I(c, BUILD_TUPLE, n);
1455 if (!compiler_enter_scope(c, s->v.ClassDef.name, (void *)s,
1456 s->lineno))
1457 return 0;
1458 Py_XDECREF(c->u->u_private);
1459 c->u->u_private = s->v.ClassDef.name;
1460 Py_INCREF(c->u->u_private);
1461 str = PyString_InternFromString("__name__");
1462 if (!str || !compiler_nameop(c, str, Load)) {
1463 Py_XDECREF(str);
1464 compiler_exit_scope(c);
1465 return 0;
1466 }
1467
1468 Py_DECREF(str);
1469 str = PyString_InternFromString("__module__");
1470 if (!str || !compiler_nameop(c, str, Store)) {
1471 Py_XDECREF(str);
1472 compiler_exit_scope(c);
1473 return 0;
1474 }
1475 Py_DECREF(str);
1476
1477 if (!compiler_body(c, s->v.ClassDef.body)) {
1478 compiler_exit_scope(c);
1479 return 0;
1480 }
1481
1482 ADDOP_IN_SCOPE(c, LOAD_LOCALS);
1483 ADDOP_IN_SCOPE(c, RETURN_VALUE);
1484 co = assemble(c, 1);
1485 compiler_exit_scope(c);
1486 if (co == NULL)
1487 return 0;
1488
1489 compiler_make_closure(c, co, 0);
1490 Py_DECREF(co);
1491
1492 ADDOP_I(c, CALL_FUNCTION, 0);
1493 ADDOP(c, BUILD_CLASS);
1494 /* apply decorators */
1495 for (i = 0; i < asdl_seq_LEN(decos); i++) {
1496 ADDOP_I(c, CALL_FUNCTION, 1);
1497 }
1498 if (!compiler_nameop(c, s->v.ClassDef.name, Store))
1499 return 0;
1500 return 1;
1501 }
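/* Illustrative bytecode shape for "class C(Base): body" as emitted by
 * compiler_class above (no decorators, no free variables assumed):
 *
 *     LOAD_CONST    'C'
 *     <code for Base>          BUILD_TUPLE 1
 *     LOAD_CONST    <code object for the class body>
 *     MAKE_FUNCTION 0          CALL_FUNCTION 0
 *     BUILD_CLASS
 *     STORE_NAME    C
 */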
1502
1503 static int
1504 compiler_ifexp(struct compiler *c, expr_ty e)
1505 {
1506 basicblock *end, *next;
1507
1508 assert(e->kind == IfExp_kind);
1509 end = compiler_new_block(c);
1510 if (end == NULL)
1511 return 0;
1512 next = compiler_new_block(c);
1513 if (next == NULL)
1514 return 0;
1515 VISIT(c, expr, e->v.IfExp.test);
1516 ADDOP_JABS(c, POP_JUMP_IF_FALSE, next);
1517 VISIT(c, expr, e->v.IfExp.body);
1518 ADDOP_JREL(c, JUMP_FORWARD, end);
1519 compiler_use_next_block(c, next);
1520 VISIT(c, expr, e->v.IfExp.orelse);
1521 compiler_use_next_block(c, end);
1522 return 1;
1523 }
1524
1525 static int
1526 compiler_lambda(struct compiler *c, expr_ty e)
1527 {
1528 PyCodeObject *co;
1529 static identifier name;
1530 arguments_ty args = e->v.Lambda.args;
1531 assert(e->kind == Lambda_kind);
1532
1533 if (!name) {
1534 name = PyString_InternFromString("<lambda>");
1535 if (!name)
1536 return 0;
1537 }
1538
1539 if (args->defaults)
1540 VISIT_SEQ(c, expr, args->defaults);
1541 if (!compiler_enter_scope(c, name, (void *)e, e->lineno))
1542 return 0;
1543
1544 /* unpack nested arguments */
1545 compiler_arguments(c, args);
1546
1547 /* Make None the first constant, so the lambda can't have a
1548 docstring. */
1549 if (compiler_add_o(c, c->u->u_consts, Py_None) < 0)
1550 return 0;
1551
1552 c->u->u_argcount = asdl_seq_LEN(args->args);
1553 VISIT_IN_SCOPE(c, expr, e->v.Lambda.body);
1554 if (c->u->u_ste->ste_generator) {
1555 ADDOP_IN_SCOPE(c, POP_TOP);
1556 }
1557 else {
1558 ADDOP_IN_SCOPE(c, RETURN_VALUE);
1559 }
1560 co = assemble(c, 1);
1561 compiler_exit_scope(c);
1562 if (co == NULL)
1563 return 0;
1564
1565 compiler_make_closure(c, co, asdl_seq_LEN(args->defaults));
1566 Py_DECREF(co);
1567
1568 return 1;
1569 }
1570
1571 static int
1572 compiler_print(struct compiler *c, stmt_ty s)
1573 {
1574 int i, n;
1575 bool dest;
1576
1577 assert(s->kind == Print_kind);
1578 n = asdl_seq_LEN(s->v.Print.values);
1579 dest = false;
1580 if (s->v.Print.dest) {
1581 VISIT(c, expr, s->v.Print.dest);
1582 dest = true;
1583 }
1584 for (i = 0; i < n; i++) {
1585 expr_ty e = (expr_ty)asdl_seq_GET(s->v.Print.values, i);
1586 if (dest) {
1587 ADDOP(c, DUP_TOP);
1588 VISIT(c, expr, e);
1589 ADDOP(c, ROT_TWO);
1590 ADDOP(c, PRINT_ITEM_TO);
1591 }
1592 else {
1593 VISIT(c, expr, e);
1594 ADDOP(c, PRINT_ITEM);
1595 }
1596 }
1597 if (s->v.Print.nl) {
1598 if (dest)
1599 ADDOP(c, PRINT_NEWLINE_TO)
1600 else
1601 ADDOP(c, PRINT_NEWLINE)
1602 }
1603 else if (dest)
1604 ADDOP(c, POP_TOP);
1605 return 1;
1606 }
1607
1608 static int
1609 compiler_if(struct compiler *c, stmt_ty s)
1610 {
1611 basicblock *end, *next;
1612 int constant;
1613 assert(s->kind == If_kind);
1614 end = compiler_new_block(c);
1615 if (end == NULL)
1616 return 0;
1617
1618 constant = expr_constant(s->v.If.test);
1619 /* constant = 0: "if 0"
1620 * constant = 1: "if 1", "if 2", ...
1621 * constant = -1: rest */
1622 if (constant == 0) {
1623 if (s->v.If.orelse)
1624 VISIT_SEQ(c, stmt, s->v.If.orelse);
1625 } else if (constant == 1) {
1626 VISIT_SEQ(c, stmt, s->v.If.body);
1627 } else {
1628 if (s->v.If.orelse) {
1629 next = compiler_new_block(c);
1630 if (next == NULL)
1631 return 0;
1632 }
1633 else
1634 next = end;
1635 VISIT(c, expr, s->v.If.test);
1636 ADDOP_JABS(c, POP_JUMP_IF_FALSE, next);
1637 VISIT_SEQ(c, stmt, s->v.If.body);
1638 ADDOP_JREL(c, JUMP_FORWARD, end);
1639 if (s->v.If.orelse) {
1640 compiler_use_next_block(c, next);
1641 VISIT_SEQ(c, stmt, s->v.If.orelse);
1642 }
1643 }
1644 compiler_use_next_block(c, end);
1645 return 1;
1646 }
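/* Illustrative block layout produced by compiler_if above for a non-constant
 * test with an else clause:
 *
 *         <test>          POP_JUMP_IF_FALSE next
 *         <body>          JUMP_FORWARD end
 *   next: <orelse>
 *   end:  <following statement>
 */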
1647
1648 static int
1649 compiler_for(struct compiler *c, stmt_ty s)
1650 {
1651 basicblock *start, *cleanup, *end;
1652
1653 start = compiler_new_block(c);
1654 cleanup = compiler_new_block(c);
1655 end = compiler_new_block(c);
1656 if (start == NULL || end == NULL || cleanup == NULL)
1657 return 0;
1658 ADDOP_JREL(c, SETUP_LOOP, end);
1659 if (!compiler_push_fblock(c, LOOP, start))
1660 return 0;
1661 VISIT(c, expr, s->v.For.iter);
1662 ADDOP(c, GET_ITER);
1663 compiler_use_next_block(c, start);
1664 ADDOP_JREL(c, FOR_ITER, cleanup);
1665 VISIT(c, expr, s->v.For.target);
1666 VISIT_SEQ(c, stmt, s->v.For.body);
1667 ADDOP_JABS(c, JUMP_ABSOLUTE, start);
1668 compiler_use_next_block(c, cleanup);
1669 ADDOP(c, POP_BLOCK);
1670 compiler_pop_fblock(c, LOOP, start);
1671 VISIT_SEQ(c, stmt, s->v.For.orelse);
1672 compiler_use_next_block(c, end);
1673 return 1;
1674 }
1675
1676 static int
1677 compiler_while(struct compiler *c, stmt_ty s)
1678 {
1679 basicblock *loop, *orelse, *end, *anchor = NULL;
1680 int constant = expr_constant(s->v.While.test);
1681
1682 if (constant == 0) {
1683 if (s->v.While.orelse)
1684 VISIT_SEQ(c, stmt, s->v.While.orelse);
1685 return 1;
1686 }
1687 loop = compiler_new_block(c);
1688 end = compiler_new_block(c);
1689 if (constant == -1) {
1690 anchor = compiler_new_block(c);
1691 if (anchor == NULL)
1692 return 0;
1693 }
1694 if (loop == NULL || end == NULL)
1695 return 0;
1696 if (s->v.While.orelse) {
1697 orelse = compiler_new_block(c);
1698 if (orelse == NULL)
1699 return 0;
1700 }
1701 else
1702 orelse = NULL;
1703
1704 ADDOP_JREL(c, SETUP_LOOP, end);
1705 compiler_use_next_block(c, loop);
1706 if (!compiler_push_fblock(c, LOOP, loop))
1707 return 0;
1708 if (constant == -1) {
1709 VISIT(c, expr, s->v.While.test);
1710 ADDOP_JABS(c, POP_JUMP_IF_FALSE, anchor);
1711 }
1712 VISIT_SEQ(c, stmt, s->v.While.body);
1713 ADDOP_JABS(c, JUMP_ABSOLUTE, loop);
1714
1715 /* XXX should the two POP instructions be in a separate block
1716 if there is no else clause ?
1717 */
1718
1719 if (constant == -1)
1720 compiler_use_next_block(c, anchor);
1721 ADDOP(c, POP_BLOCK);
1722 compiler_pop_fblock(c, LOOP, loop);
1723 if (orelse != NULL) /* what if orelse is just pass? */
1724 VISIT_SEQ(c, stmt, s->v.While.orelse);
1725 compiler_use_next_block(c, end);
1726
1727 return 1;
1728 }
1729
1730 static int
1731 compiler_continue(struct compiler *c)
1732 {
1733 static const char LOOP_ERROR_MSG[] = "'continue' not properly in loop";
1734 static const char IN_FINALLY_ERROR_MSG[] =
1735 "'continue' not supported inside 'finally' clause";
1736 int i;
1737
1738 if (!c->u->u_nfblocks)
1739 return compiler_error(c, LOOP_ERROR_MSG);
1740 i = c->u->u_nfblocks - 1;
1741 switch (c->u->u_fblock[i].fb_type) {
1742 case LOOP:
1743 ADDOP_JABS(c, JUMP_ABSOLUTE, c->u->u_fblock[i].fb_block);
1744 break;
1745 case EXCEPT:
1746 case FINALLY_TRY:
1747 while (--i >= 0 && c->u->u_fblock[i].fb_type != LOOP) {
1748 /* Prevent continue anywhere under a finally
1749 even if hidden in a sub-try or except. */
1750 if (c->u->u_fblock[i].fb_type == FINALLY_END)
1751 return compiler_error(c, IN_FINALLY_ERROR_MSG);
1752 }
1753 if (i == -1)
1754 return compiler_error(c, LOOP_ERROR_MSG);
1755 ADDOP_JABS(c, CONTINUE_LOOP, c->u->u_fblock[i].fb_block);
1756 break;
1757 case FINALLY_END:
1758 return compiler_error(c, IN_FINALLY_ERROR_MSG);
1759 }
1760
1761 return 1;
1762 }
1763
1764 /* Code generated for "try: <body> finally: <finalbody>" is as follows:
1765
1766 SETUP_FINALLY L
1767 <code for body>
1768 POP_BLOCK
1769 LOAD_CONST <None>
1770 L: <code for finalbody>
1771 END_FINALLY
1772
1773 The special instructions use the block stack. Each block
1774 stack entry contains the instruction that created it (here
1775 SETUP_FINALLY), the level of the value stack at the time the
1776 block stack entry was created, and a label (here L).
1777
1778 SETUP_FINALLY:
1779 Pushes the current value stack level and the label
1780 onto the block stack.
1781 POP_BLOCK:
1782 Pops an entry from the block stack, and pops the value
1783 stack until its level is the same as indicated on the
1784 block stack. (The label is ignored.)
1785 END_FINALLY:
1786 Pops a variable number of entries from the *value* stack
1787 and re-raises the exception they specify. The number of
1788 entries popped depends on the (pseudo) exception type.
1789
1790 The block stack is unwound when an exception is raised:
1791 when a SETUP_FINALLY entry is found, the exception is pushed
1792 onto the value stack (and the exception condition is cleared),
1793 and the interpreter jumps to the label gotten from the block
1794 stack.
1795 */
1796
1797 static int
1798 compiler_try_finally(struct compiler *c, stmt_ty s)
1799 {
1800 basicblock *body, *end;
1801 body = compiler_new_block(c);
1802 end = compiler_new_block(c);
1803 if (body == NULL || end == NULL)
1804 return 0;
1805
1806 ADDOP_JREL(c, SETUP_FINALLY, end);
1807 compiler_use_next_block(c, body);
1808 if (!compiler_push_fblock(c, FINALLY_TRY, body))
1809 return 0;
1810 VISIT_SEQ(c, stmt, s->v.TryFinally.body);
1811 ADDOP(c, POP_BLOCK);
1812 compiler_pop_fblock(c, FINALLY_TRY, body);
1813
1814 ADDOP_O(c, LOAD_CONST, Py_None, consts);
1815 compiler_use_next_block(c, end);
1816 if (!compiler_push_fblock(c, FINALLY_END, end))
1817 return 0;
1818 VISIT_SEQ(c, stmt, s->v.TryFinally.finalbody);
1819 ADDOP(c, END_FINALLY);
1820 compiler_pop_fblock(c, FINALLY_END, end);
1821
1822 return 1;
1823 }
1824
1825 /*
1826 Code generated for "try: S except E1, V1: S1 except E2, V2: S2 ...":
1827 (The contents of the value stack are shown in [], with the top
1828 at the right; 'tb' is trace-back info, 'val' the exception's
1829 associated value, and 'exc' the exception.)
1830
1831 Value stack Label Instruction Argument
1832 [] SETUP_EXCEPT L1
1833 [] <code for S>
1834 [] POP_BLOCK
1835 [] JUMP_FORWARD L0
1836
1837 [tb, val, exc] L1: DUP )
1838 [tb, val, exc, exc] <evaluate E1> )
1839 [tb, val, exc, exc, E1] COMPARE_OP EXC_MATCH ) only if E1
1840 [tb, val, exc, 1-or-0] POP_JUMP_IF_FALSE L2 )
1841 [tb, val, exc] POP
1842 [tb, val] <assign to V1> (or POP if no V1)
1843 [tb] POP
1844 [] <code for S1>
1845 JUMP_FORWARD L0
1846
1847 [tb, val, exc] L2: DUP
1848 .............................etc.......................
1849
1850 [tb, val, exc] Ln+1: END_FINALLY # re-raise exception
1851
1852 [] L0: <next statement>
1853
1854 Of course, parts are not generated if Vi or Ei is not present.
1855 */
1856 static int
1857 compiler_try_except(struct compiler *c, stmt_ty s)
1858 {
1859 basicblock *body, *orelse, *except, *end;
1860 int i, n;
1861
1862 body = compiler_new_block(c);
1863 except = compiler_new_block(c);
1864 orelse = compiler_new_block(c);
1865 end = compiler_new_block(c);
1866 if (body == NULL || except == NULL || orelse == NULL || end == NULL)
1867 return 0;
1868 ADDOP_JREL(c, SETUP_EXCEPT, except);
1869 compiler_use_next_block(c, body);
1870 if (!compiler_push_fblock(c, EXCEPT, body))
1871 return 0;
1872 VISIT_SEQ(c, stmt, s->v.TryExcept.body);
1873 ADDOP(c, POP_BLOCK);
1874 compiler_pop_fblock(c, EXCEPT, body);
1875 ADDOP_JREL(c, JUMP_FORWARD, orelse);
1876 n = asdl_seq_LEN(s->v.TryExcept.handlers);
1877 compiler_use_next_block(c, except);
1878 for (i = 0; i < n; i++) {
1879 excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
1880 s->v.TryExcept.handlers, i);
1881 if (!handler->v.ExceptHandler.type && i < n-1)
1882 return compiler_error(c, "default 'except:' must be last");
1883 c->u->u_lineno_set = false;
1884 c->u->u_lineno = handler->lineno;
1885 except = compiler_new_block(c);
1886 if (except == NULL)
1887 return 0;
1888 if (handler->v.ExceptHandler.type) {
1889 ADDOP(c, DUP_TOP);
1890 VISIT(c, expr, handler->v.ExceptHandler.type);
1891 ADDOP_I(c, COMPARE_OP, PyCmp_EXC_MATCH);
1892 ADDOP_JABS(c, POP_JUMP_IF_FALSE, except);
1893 }
1894 ADDOP(c, POP_TOP);
1895 if (handler->v.ExceptHandler.name) {
1896 VISIT(c, expr, handler->v.ExceptHandler.name);
1897 }
1898 else {
1899 ADDOP(c, POP_TOP);
1900 }
1901 ADDOP(c, POP_TOP);
1902 VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body);
1903 ADDOP_JREL(c, JUMP_FORWARD, end);
1904 compiler_use_next_block(c, except);
1905 }
1906 ADDOP(c, END_FINALLY);
1907 compiler_use_next_block(c, orelse);
1908 VISIT_SEQ(c, stmt, s->v.TryExcept.orelse);
1909 compiler_use_next_block(c, end);
1910 return 1;
1911 }
1912
1913 static int
1914 compiler_import_as(struct compiler *c, identifier name, identifier asname)
1915 {
1916 /* The IMPORT_NAME opcode was already generated. This function
1917 merely needs to bind the result to a name.
1918
1919 If there is a dot in name, we need to split it and emit a
1920 LOAD_ATTR for each name.
1921 */
1922 const char *src = PyString_AS_STRING(name);
1923 const char *dot = strchr(src, '.');
1924 if (dot) {
1925 /* Consume the base module name to get the first attribute */
1926 src = dot + 1;
1927 while (dot) {
1928 /* NB src is only defined when dot != NULL */
1929 PyObject *attr;
1930 dot = strchr(src, '.');
1931 attr = PyString_FromStringAndSize(src,
1932 dot ? dot - src : strlen(src));
1933 if (!attr)
1934 return -1;
1935 ADDOP_O(c, LOAD_ATTR, attr, names);
1936 Py_DECREF(attr);
1937 src = dot + 1;
1938 }
1939 }
1940 return compiler_nameop(c, asname, Store);
1941 }
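/* Illustrative bytecode for "import a.b.c as d", combining compiler_import
 * (below) with this helper; a default (relative) import level of -1 is
 * assumed:
 *
 *     LOAD_CONST   -1          (import level)
 *     LOAD_CONST   None        (fromlist)
 *     IMPORT_NAME  a.b.c
 *     LOAD_ATTR    b
 *     LOAD_ATTR    c
 *     STORE_NAME   d
 */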
1942
1943 static int
1944 compiler_import(struct compiler *c, stmt_ty s)
1945 {
1946 /* The Import node stores a module name like a.b.c as a single
1947 string. This is convenient for all cases except
1948 import a.b.c as d
1949 where we need to parse that string to extract the individual
1950 module names.
1951 XXX Perhaps change the representation to make this case simpler?
1952 */
1953 int i, n = asdl_seq_LEN(s->v.Import.names);
1954
1955 for (i = 0; i < n; i++) {
1956 alias_ty alias = (alias_ty)asdl_seq_GET(s->v.Import.names, i);
1957 int r;
1958 PyObject *level;
1959
1960 if (c->c_flags && (c->c_flags->cf_flags & CO_FUTURE_ABSOLUTE_IMPORT))
1961 level = PyInt_FromLong(0);
1962 else
1963 level = PyInt_FromLong(-1);
1964
1965 if (level == NULL)
1966 return 0;
1967
1968 ADDOP_O(c, LOAD_CONST, level, consts);
1969 Py_DECREF(level);
1970 ADDOP_O(c, LOAD_CONST, Py_None, consts);
1971 ADDOP_NAME(c, IMPORT_NAME, alias->name, names);
1972
1973 if (alias->asname) {
1974 r = compiler_import_as(c, alias->name, alias->asname);
1975 if (!r)
1976 return r;
1977 }
1978 else {
1979 identifier tmp = alias->name;
1980 const char *base = PyString_AS_STRING(alias->name);
1981 char *dot = strchr(base, '.');
1982 if (dot)
1983 tmp = PyString_FromStringAndSize(base,
1984 dot - base);
1985 r = compiler_nameop(c, tmp, Store);
1986 if (dot) {
1987 Py_DECREF(tmp);
1988 }
1989 if (!r)
1990 return r;
1991 }
1992 }
1993 return 1;
1994 }
1995
1996 static int
1997 compiler_from_import(struct compiler *c, stmt_ty s)
1998 {
1999 int i, n = asdl_seq_LEN(s->v.ImportFrom.names);
2000
2001 PyObject *names = PyTuple_New(n);
2002 PyObject *level;
2003 static PyObject *empty_string;
2004
2005 if (!empty_string) {
2006 empty_string = PyString_FromString("");
2007 if (!empty_string)
2008 return 0;
2009 }
2010
2011 if (!names)
2012 return 0;
2013
2014 if (s->v.ImportFrom.level == 0 && c->c_flags &&
2015 !(c->c_flags->cf_flags & CO_FUTURE_ABSOLUTE_IMPORT))
2016 level = PyInt_FromLong(-1);
2017 else
2018 level = PyInt_FromLong(s->v.ImportFrom.level);
2019
2020 if (!level) {
2021 Py_DECREF(names);
2022 return 0;
2023 }
2024
2025 /* build up the names */
2026 for (i = 0; i < n; i++) {
2027 alias_ty alias = (alias_ty)asdl_seq_GET(s->v.ImportFrom.names, i);
2028 Py_INCREF(alias->name);
2029 PyTuple_SET_ITEM(names, i, alias->name);
2030 }
2031
2032 if (s->lineno > c->c_future->ff_lineno && s->v.ImportFrom.module &&
2033 !strcmp(PyString_AS_STRING(s->v.ImportFrom.module), "__future__")) {
2034 Py_DECREF(level);
2035 Py_DECREF(names);
2036 return compiler_error(c, "from __future__ imports must occur "
2037 "at the beginning of the file");
2038 }
2039
2040 ADDOP_O(c, LOAD_CONST, level, consts);
2041 Py_DECREF(level);
2042 ADDOP_O(c, LOAD_CONST, names, consts);
2043 Py_DECREF(names);
2044 if (s->v.ImportFrom.module) {
2045 ADDOP_NAME(c, IMPORT_NAME, s->v.ImportFrom.module, names);
2046 }
2047 else {
2048 ADDOP_NAME(c, IMPORT_NAME, empty_string, names);
2049 }
2050 for (i = 0; i < n; i++) {
2051 alias_ty alias = (alias_ty)asdl_seq_GET(s->v.ImportFrom.names, i);
2052 identifier store_name;
2053
2054 if (i == 0 && *PyString_AS_STRING(alias->name) == '*') {
2055 assert(n == 1);
2056 ADDOP(c, IMPORT_STAR);
2057 return 1;
2058 }
2059
2060 ADDOP_NAME(c, IMPORT_FROM, alias->name, names);
2061 store_name = alias->name;
2062 if (alias->asname)
2063 store_name = alias->asname;
2064
2065 if (!compiler_nameop(c, store_name, Store)) {
2066 Py_DECREF(names);
2067 return 0;
2068 }
2069 }
2070 /* remove imported module */
2071 ADDOP(c, POP_TOP);
2072 return 1;
2073 }
2074
2075 static int
2076 compiler_assert(struct compiler *c, stmt_ty s)
2077 {
2078 static PyObject *assertion_error = NULL;
2079 basicblock *end;
2080
2081 if (Py_OptimizeFlag)
2082 return 1;
2083 if (assertion_error == NULL) {
2084 assertion_error = PyString_InternFromString("AssertionError");
2085 if (assertion_error == NULL)
2086 return 0;
2087 }
2088 if (s->v.Assert.test->kind == Tuple_kind &&
2089 asdl_seq_LEN(s->v.Assert.test->v.Tuple.elts) > 0) {
2090 const char* msg =
2091 "assertion is always true, perhaps remove parentheses?";
2092 if (PyErr_WarnExplicit(PyExc_SyntaxWarning, msg, c->c_filename,
2093 c->u->u_lineno, NULL, NULL) == -1)
2094 return 0;
2095 }
2096 VISIT(c, expr, s->v.Assert.test);
2097 end = compiler_new_block(c);
2098 if (end == NULL)
2099 return 0;
2100 ADDOP_JABS(c, POP_JUMP_IF_TRUE, end);
2101 ADDOP_O(c, LOAD_GLOBAL, assertion_error, names);
2102 if (s->v.Assert.msg) {
2103 VISIT(c, expr, s->v.Assert.msg);
2104 ADDOP_I(c, CALL_FUNCTION, 1);
2105 }
2106 ADDOP_I(c, RAISE_VARARGS, 1);
2107 compiler_use_next_block(c, end);
2108 return 1;
2109 }
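/* Rough illustration (hand-sketched; hypothetical names):

    assert x, "oops"

    LOAD_NAME        x
    POP_JUMP_IF_TRUE end
    LOAD_GLOBAL      AssertionError
    LOAD_CONST       'oops'
    CALL_FUNCTION    1
    RAISE_VARARGS    1
  end:

With no message the CALL_FUNCTION is skipped and the class itself is
raised; with -O (Py_OptimizeFlag) nothing is emitted at all. */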
2110
2111 static int
2112 compiler_visit_stmt(struct compiler *c, stmt_ty s)
2113 {
2114 int i, n;
2115
2116 /* Always assign a lineno to the next instruction for a stmt. */
2117 c->u->u_lineno = s->lineno;
2118 c->u->u_lineno_set = false;
2119
2120 switch (s->kind) {
2121 case FunctionDef_kind:
2122 return compiler_function(c, s);
2123 case ClassDef_kind:
2124 return compiler_class(c, s);
2125 case Return_kind:
2126 if (c->u->u_ste->ste_type != FunctionBlock)
2127 return compiler_error(c, "'return' outside function");
2128 if (s->v.Return.value) {
2129 VISIT(c, expr, s->v.Return.value);
2130 }
2131 else
2132 ADDOP_O(c, LOAD_CONST, Py_None, consts);
2133 ADDOP(c, RETURN_VALUE);
2134 break;
2135 case Delete_kind:
2136 VISIT_SEQ(c, expr, s->v.Delete.targets)
2137 break;
2138 case Assign_kind:
2139 n = asdl_seq_LEN(s->v.Assign.targets);
2140 VISIT(c, expr, s->v.Assign.value);
2141 for (i = 0; i < n; i++) {
2142 if (i < n - 1)
2143 ADDOP(c, DUP_TOP);
2144 VISIT(c, expr,
2145 (expr_ty)asdl_seq_GET(s->v.Assign.targets, i));
2146 }
2147 break;
2148 case AugAssign_kind:
2149 return compiler_augassign(c, s);
2150 case Print_kind:
2151 return compiler_print(c, s);
2152 case For_kind:
2153 return compiler_for(c, s);
2154 case While_kind:
2155 return compiler_while(c, s);
2156 case If_kind:
2157 return compiler_if(c, s);
2158 case Raise_kind:
2159 n = 0;
2160 if (s->v.Raise.type) {
2161 VISIT(c, expr, s->v.Raise.type);
2162 n++;
2163 if (s->v.Raise.inst) {
2164 VISIT(c, expr, s->v.Raise.inst);
2165 n++;
2166 if (s->v.Raise.tback) {
2167 VISIT(c, expr, s->v.Raise.tback);
2168 n++;
2169 }
2170 }
2171 }
2172 ADDOP_I(c, RAISE_VARARGS, n);
2173 break;
2174 case TryExcept_kind:
2175 return compiler_try_except(c, s);
2176 case TryFinally_kind:
2177 return compiler_try_finally(c, s);
2178 case Assert_kind:
2179 return compiler_assert(c, s);
2180 case Import_kind:
2181 return compiler_import(c, s);
2182 case ImportFrom_kind:
2183 return compiler_from_import(c, s);
2184 case Exec_kind:
2185 VISIT(c, expr, s->v.Exec.body);
2186 if (s->v.Exec.globals) {
2187 VISIT(c, expr, s->v.Exec.globals);
2188 if (s->v.Exec.locals) {
2189 VISIT(c, expr, s->v.Exec.locals);
2190 } else {
2191 ADDOP(c, DUP_TOP);
2192 }
2193 } else {
2194 ADDOP_O(c, LOAD_CONST, Py_None, consts);
2195 ADDOP(c, DUP_TOP);
2196 }
2197 ADDOP(c, EXEC_STMT);
2198 break;
2199 case Global_kind:
2200 break;
2201 case Expr_kind:
2202 if (c->c_interactive && c->c_nestlevel <= 1) {
2203 VISIT(c, expr, s->v.Expr.value);
2204 ADDOP(c, PRINT_EXPR);
2205 }
2206 else if (s->v.Expr.value->kind != Str_kind &&
2207 s->v.Expr.value->kind != Num_kind) {
2208 VISIT(c, expr, s->v.Expr.value);
2209 ADDOP(c, POP_TOP);
2210 }
2211 break;
2212 case Pass_kind:
2213 break;
2214 case Break_kind:
2215 if (!compiler_in_loop(c))
2216 return compiler_error(c, "'break' outside loop");
2217 ADDOP(c, BREAK_LOOP);
2218 break;
2219 case Continue_kind:
2220 return compiler_continue(c);
2221 case With_kind:
2222 return compiler_with(c, s);
2223 }
2224 return 1;
2225 }
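/* Example for the Assign_kind case above: a chained assignment duplicates
the value once per extra target (hand-sketched; hypothetical names):

    a = b = f()

    LOAD_NAME     f
    CALL_FUNCTION 0
    DUP_TOP
    STORE_NAME    a
    STORE_NAME    b
*/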
2226
2227 static int
2228 unaryop(unaryop_ty op)
2229 {
2230 switch (op) {
2231 case Invert:
2232 return UNARY_INVERT;
2233 case Not:
2234 return UNARY_NOT;
2235 case UAdd:
2236 return UNARY_POSITIVE;
2237 case USub:
2238 return UNARY_NEGATIVE;
2239 default:
2240 PyErr_Format(PyExc_SystemError,
2241 "unary op %d should not be possible", op);
2242 return 0;
2243 }
2244 }
2245
2246 static int
2247 binop(struct compiler *c, operator_ty op)
2248 {
2249 switch (op) {
2250 case Add:
2251 return BINARY_ADD;
2252 case Sub:
2253 return BINARY_SUBTRACT;
2254 case Mult:
2255 return BINARY_MULTIPLY;
2256 case Div:
2257 if (c->c_flags && c->c_flags->cf_flags & CO_FUTURE_DIVISION)
2258 return BINARY_TRUE_DIVIDE;
2259 else
2260 return BINARY_DIVIDE;
2261 case Mod:
2262 return BINARY_MODULO;
2263 case Pow:
2264 return BINARY_POWER;
2265 case LShift:
2266 return BINARY_LSHIFT;
2267 case RShift:
2268 return BINARY_RSHIFT;
2269 case BitOr:
2270 return BINARY_OR;
2271 case BitXor:
2272 return BINARY_XOR;
2273 case BitAnd:
2274 return BINARY_AND;
2275 case FloorDiv:
2276 return BINARY_FLOOR_DIVIDE;
2277 default:
2278 PyErr_Format(PyExc_SystemError,
2279 "binary op %d should not be possible", op);
2280 return 0;
2281 }
2282 }
2283
2284 static int
2285 cmpop(cmpop_ty op)
2286 {
2287 switch (op) {
2288 case Eq:
2289 return PyCmp_EQ;
2290 case NotEq:
2291 return PyCmp_NE;
2292 case Lt:
2293 return PyCmp_LT;
2294 case LtE:
2295 return PyCmp_LE;
2296 case Gt:
2297 return PyCmp_GT;
2298 case GtE:
2299 return PyCmp_GE;
2300 case Is:
2301 return PyCmp_IS;
2302 case IsNot:
2303 return PyCmp_IS_NOT;
2304 case In:
2305 return PyCmp_IN;
2306 case NotIn:
2307 return PyCmp_NOT_IN;
2308 default:
2309 return PyCmp_BAD;
2310 }
2311 }
2312
2313 static int
2314 inplace_binop(struct compiler *c, operator_ty op)
2315 {
2316 switch (op) {
2317 case Add:
2318 return INPLACE_ADD;
2319 case Sub:
2320 return INPLACE_SUBTRACT;
2321 case Mult:
2322 return INPLACE_MULTIPLY;
2323 case Div:
2324 if (c->c_flags && c->c_flags->cf_flags & CO_FUTURE_DIVISION)
2325 return INPLACE_TRUE_DIVIDE;
2326 else
2327 return INPLACE_DIVIDE;
2328 case Mod:
2329 return INPLACE_MODULO;
2330 case Pow:
2331 return INPLACE_POWER;
2332 case LShift:
2333 return INPLACE_LSHIFT;
2334 case RShift:
2335 return INPLACE_RSHIFT;
2336 case BitOr:
2337 return INPLACE_OR;
2338 case BitXor:
2339 return INPLACE_XOR;
2340 case BitAnd:
2341 return INPLACE_AND;
2342 case FloorDiv:
2343 return INPLACE_FLOOR_DIVIDE;
2344 default:
2345 PyErr_Format(PyExc_SystemError,
2346 "inplace binary op %d should not be possible", op);
2347 return 0;
2348 }
2349 }
2350
2351 static int
2352 compiler_nameop(struct compiler *c, identifier name, expr_context_ty ctx)
2353 {
2354 int op, scope, arg;
2355 enum { OP_FAST, OP_GLOBAL, OP_DEREF, OP_NAME } optype;
2356
2357 PyObject *dict = c->u->u_names;
2358 PyObject *mangled;
2359 /* XXX AugStore isn't used anywhere! */
2360
2361 mangled = _Py_Mangle(c->u->u_private, name);
2362 if (!mangled)
2363 return 0;
2364
2365 op = 0;
2366 optype = OP_NAME;
2367 scope = PyST_GetScope(c->u->u_ste, mangled);
2368 switch (scope) {
2369 case FREE:
2370 dict = c->u->u_freevars;
2371 optype = OP_DEREF;
2372 break;
2373 case CELL:
2374 dict = c->u->u_cellvars;
2375 optype = OP_DEREF;
2376 break;
2377 case LOCAL:
2378 if (c->u->u_ste->ste_type == FunctionBlock)
2379 optype = OP_FAST;
2380 break;
2381 case GLOBAL_IMPLICIT:
2382 if (c->u->u_ste->ste_type == FunctionBlock &&
2383 !c->u->u_ste->ste_unoptimized)
2384 optype = OP_GLOBAL;
2385 break;
2386 case GLOBAL_EXPLICIT:
2387 optype = OP_GLOBAL;
2388 break;
2389 default:
2390 /* scope can be 0 */
2391 break;
2392 }
2393
2394 /* XXX Leave assert here, but handle __doc__ and the like better */
2395 assert(scope || PyString_AS_STRING(name)[0] == '_');
2396
2397 switch (optype) {
2398 case OP_DEREF:
2399 switch (ctx) {
2400 case Load: op = LOAD_DEREF; break;
2401 case Store: op = STORE_DEREF; break;
2402 case AugLoad:
2403 case AugStore:
2404 break;
2405 case Del:
2406 PyErr_Format(PyExc_SyntaxError,
2407 "can not delete variable '%s' referenced "
2408 "in nested scope",
2409 PyString_AS_STRING(name));
2410 Py_DECREF(mangled);
2411 return 0;
2412 case Param:
2413 default:
2414 PyErr_SetString(PyExc_SystemError,
2415 "param invalid for deref variable");
2416 Py_DECREF(mangled);
return 0;
2417 }
2418 break;
2419 case OP_FAST:
2420 switch (ctx) {
2421 case Load: op = LOAD_FAST; break;
2422 case Store: op = STORE_FAST; break;
2423 case Del: op = DELETE_FAST; break;
2424 case AugLoad:
2425 case AugStore:
2426 break;
2427 case Param:
2428 default:
2429 PyErr_SetString(PyExc_SystemError,
2430 "param invalid for local variable");
2431 return 0;
2432 }
2433 ADDOP_O(c, op, mangled, varnames);
2434 Py_DECREF(mangled);
2435 return 1;
2436 case OP_GLOBAL:
2437 switch (ctx) {
2438 case Load: op = LOAD_GLOBAL; break;
2439 case Store: op = STORE_GLOBAL; break;
2440 case Del: op = DELETE_GLOBAL; break;
2441 case AugLoad:
2442 case AugStore:
2443 break;
2444 case Param:
2445 default:
2446 PyErr_SetString(PyExc_SystemError,
2447 "param invalid for global variable");
2448 return 0;
2449 }
2450 break;
2451 case OP_NAME:
2452 switch (ctx) {
2453 case Load: op = LOAD_NAME; break;
2454 case Store: op = STORE_NAME; break;
2455 case Del: op = DELETE_NAME; break;
2456 case AugLoad:
2457 case AugStore:
2458 break;
2459 case Param:
2460 default:
2461 PyErr_SetString(PyExc_SystemError,
2462 "param invalid for name variable");
2463 return 0;
2464 }
2465 break;
2466 }
2467
2468 assert(op);
2469 arg = compiler_add_o(c, dict, mangled);
2470 Py_DECREF(mangled);
2471 if (arg < 0)
2472 return 0;
2473 return compiler_addop_i(c, op, arg);
2474 }
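/* Rough summary of the mapping above (illustrative only): a local name in
an ordinary function body becomes LOAD_FAST/STORE_FAST/DELETE_FAST; an
explicitly (or, in an optimized function, implicitly) global name uses the
*_GLOBAL opcodes; cell and free variables of closures use the *_DEREF
opcodes; everything else, such as module and class bodies or functions
left unoptimized by "exec" or "import *", falls back to the generic
*_NAME opcodes that consult the namespace dict at run time. */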
2475
2476 static int
2477 compiler_boolop(struct compiler *c, expr_ty e)
2478 {
2479 basicblock *end;
2480 int jumpi, i, n;
2481 asdl_seq *s;
2482
2483 assert(e->kind == BoolOp_kind);
2484 if (e->v.BoolOp.op == And)
2485 jumpi = JUMP_IF_FALSE_OR_POP;
2486 else
2487 jumpi = JUMP_IF_TRUE_OR_POP;
2488 end = compiler_new_block(c);
2489 if (end == NULL)
2490 return 0;
2491 s = e->v.BoolOp.values;
2492 n = asdl_seq_LEN(s) - 1;
2493 assert(n >= 0);
2494 for (i = 0; i < n; ++i) {
2495 VISIT(c, expr, (expr_ty)asdl_seq_GET(s, i));
2496 ADDOP_JABS(c, jumpi, end);
2497 }
2498 VISIT(c, expr, (expr_ty)asdl_seq_GET(s, n));
2499 compiler_use_next_block(c, end);
2500 return 1;
2501 }
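/* Rough illustration (hand-sketched; hypothetical names):

    a and b and c

    LOAD_NAME             a
    JUMP_IF_FALSE_OR_POP  end
    LOAD_NAME             b
    JUMP_IF_FALSE_OR_POP  end
    LOAD_NAME             c
  end:

"or" is identical except that JUMP_IF_TRUE_OR_POP is used, so the first
decisive value is left on the stack as the result. */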
2502
2503 static int
2504 compiler_list(struct compiler *c, expr_ty e)
2505 {
2506 int n = asdl_seq_LEN(e->v.List.elts);
2507 if (e->v.List.ctx == Store) {
2508 ADDOP_I(c, UNPACK_SEQUENCE, n);
2509 }
2510 VISIT_SEQ(c, expr, e->v.List.elts);
2511 if (e->v.List.ctx == Load) {
2512 ADDOP_I(c, BUILD_LIST, n);
2513 }
2514 return 1;
2515 }
2516
2517 static int
2518 compiler_tuple(struct compiler *c, expr_ty e)
2519 {
2520 int n = asdl_seq_LEN(e->v.Tuple.elts);
2521 if (e->v.Tuple.ctx == Store) {
2522 ADDOP_I(c, UNPACK_SEQUENCE, n);
2523 }
2524 VISIT_SEQ(c, expr, e->v.Tuple.elts);
2525 if (e->v.Tuple.ctx == Load) {
2526 ADDOP_I(c, BUILD_TUPLE, n);
2527 }
2528 return 1;
2529 }
2530
2531 static int
2532 compiler_compare(struct compiler *c, expr_ty e)
2533 {
2534 int i, n;
2535 basicblock *cleanup = NULL;
2536
2537 /* XXX the logic can be cleaned up for 1 or multiple comparisons */
2538 VISIT(c, expr, e->v.Compare.left);
2539 n = asdl_seq_LEN(e->v.Compare.ops);
2540 assert(n > 0);
2541 if (n > 1) {
2542 cleanup = compiler_new_block(c);
2543 if (cleanup == NULL)
2544 return 0;
2545 VISIT(c, expr,
2546 (expr_ty)asdl_seq_GET(e->v.Compare.comparators, 0));
2547 }
2548 for (i = 1; i < n; i++) {
2549 ADDOP(c, DUP_TOP);
2550 ADDOP(c, ROT_THREE);
2551 ADDOP_I(c, COMPARE_OP,
2552 cmpop((cmpop_ty)(asdl_seq_GET(
2553 e->v.Compare.ops, i - 1))));
2554 ADDOP_JABS(c, JUMP_IF_FALSE_OR_POP, cleanup);
2555 NEXT_BLOCK(c);
2556 if (i < (n - 1))
2557 VISIT(c, expr,
2558 (expr_ty)asdl_seq_GET(e->v.Compare.comparators, i));
2559 }
2560 VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, n - 1));
2561 ADDOP_I(c, COMPARE_OP,
2562 cmpop((cmpop_ty)(asdl_seq_GET(e->v.Compare.ops, n - 1))));
2563 if (n > 1) {
2564 basicblock *end = compiler_new_block(c);
2565 if (end == NULL)
2566 return 0;
2567 ADDOP_JREL(c, JUMP_FORWARD, end);
2568 compiler_use_next_block(c, cleanup);
2569 ADDOP(c, ROT_TWO);
2570 ADDOP(c, POP_TOP);
2571 compiler_use_next_block(c, end);
2572 }
2573 return 1;
2574 }
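/* Rough illustration of a chained comparison (hand-sketched; hypothetical
names):

    a < b < c

    LOAD_NAME   a
    LOAD_NAME   b
    DUP_TOP
    ROT_THREE
    COMPARE_OP  <
    JUMP_IF_FALSE_OR_POP cleanup
    LOAD_NAME   c
    COMPARE_OP  <
    JUMP_FORWARD end
  cleanup:
    ROT_TWO
    POP_TOP
  end:

so each middle operand is evaluated only once, and the cleanup block
discards the leftover operand when an early comparison is false. */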
2575
2576 static int
2577 compiler_call(struct compiler *c, expr_ty e)
2578 {
2579 int n, code = 0;
2580
2581 VISIT(c, expr, e->v.Call.func);
2582 n = asdl_seq_LEN(e->v.Call.args);
2583 VISIT_SEQ(c, expr, e->v.Call.args);
2584 if (e->v.Call.keywords) {
2585 VISIT_SEQ(c, keyword, e->v.Call.keywords);
2586 n |= asdl_seq_LEN(e->v.Call.keywords) << 8;
2587 }
2588 if (e->v.Call.starargs) {
2589 VISIT(c, expr, e->v.Call.starargs);
2590 code |= 1;
2591 }
2592 if (e->v.Call.kwargs) {
2593 VISIT(c, expr, e->v.Call.kwargs);
2594 code |= 2;
2595 }
2596 switch (code) {
2597 case 0:
2598 ADDOP_I(c, CALL_FUNCTION, n);
2599 break;
2600 case 1:
2601 ADDOP_I(c, CALL_FUNCTION_VAR, n);
2602 break;
2603 case 2:
2604 ADDOP_I(c, CALL_FUNCTION_KW, n);
2605 break;
2606 case 3:
2607 ADDOP_I(c, CALL_FUNCTION_VAR_KW, n);
2608 break;
2609 }
2610 return 1;
2611 }
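/* The CALL_FUNCTION oparg packs the positional count in the low byte and
the keyword-pair count in the second byte. For example (hand-sketched,
hypothetical names), f(1, 2, x=3) ends with CALL_FUNCTION 0x0102, i.e.
2 positional arguments and 1 keyword pair; *args and/or **kwargs switch
the opcode to the _VAR/_KW/_VAR_KW variants as selected above. */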
2612
2613 static int
2614 compiler_listcomp_generator(struct compiler *c, asdl_seq *generators,
2615 int gen_index, expr_ty elt)
2616 {
2617 /* generate code for the iterator, then each of the ifs,
2618 and then the code that appends the element */
2619
2620 comprehension_ty l;
2621 basicblock *start, *anchor, *skip, *if_cleanup;
2622 int i, n;
2623
2624 start = compiler_new_block(c);
2625 skip = compiler_new_block(c);
2626 if_cleanup = compiler_new_block(c);
2627 anchor = compiler_new_block(c);
2628
2629 if (start == NULL || skip == NULL || if_cleanup == NULL ||
2630 anchor == NULL)
2631 return 0;
2632
2633 l = (comprehension_ty)asdl_seq_GET(generators, gen_index);
2634 VISIT(c, expr, l->iter);
2635 ADDOP(c, GET_ITER);
2636 compiler_use_next_block(c, start);
2637 ADDOP_JREL(c, FOR_ITER, anchor);
2638 NEXT_BLOCK(c);
2639 VISIT(c, expr, l->target);
2640
2641 /* XXX this needs to be cleaned up...a lot! */
2642 n = asdl_seq_LEN(l->ifs);
2643 for (i = 0; i < n; i++) {
2644 expr_ty e = (expr_ty)asdl_seq_GET(l->ifs, i);
2645 VISIT(c, expr, e);
2646 ADDOP_JABS(c, POP_JUMP_IF_FALSE, if_cleanup);
2647 NEXT_BLOCK(c);
2648 }
2649
2650 if (++gen_index < asdl_seq_LEN(generators))
2651 if (!compiler_listcomp_generator(c, generators, gen_index, elt))
2652 return 0;
2653
2654 /* only append after the last for generator */
2655 if (gen_index >= asdl_seq_LEN(generators)) {
2656 VISIT(c, expr, elt);
2657 ADDOP_I(c, LIST_APPEND, gen_index+1);
2658
2659 compiler_use_next_block(c, skip);
2660 }
2661 compiler_use_next_block(c, if_cleanup);
2662 ADDOP_JABS(c, JUMP_ABSOLUTE, start);
2663 compiler_use_next_block(c, anchor);
2664
2665 return 1;
2666 }
2667
2668 static int
2669 compiler_listcomp(struct compiler *c, expr_ty e)
2670 {
2671 assert(e->kind == ListComp_kind);
2672 ADDOP_I(c, BUILD_LIST, 0);
2673 return compiler_listcomp_generator(c, e->v.ListComp.generators, 0,
2674 e->v.ListComp.elt);
2675 }
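/* Rough illustration (hand-sketched; hypothetical names; block labels are
the local variables of compiler_listcomp_generator above):

    [x*2 for x in seq if x]

    BUILD_LIST   0
    LOAD_NAME    seq
    GET_ITER
  start:
    FOR_ITER     anchor
    STORE_NAME   x
    LOAD_NAME    x
    POP_JUMP_IF_FALSE if_cleanup
    LOAD_NAME    x
    LOAD_CONST   2
    BINARY_MULTIPLY
    LIST_APPEND  2
  if_cleanup:
    JUMP_ABSOLUTE start
  anchor:

LIST_APPEND's argument (gen_index+1 above) is how far down the stack the
list being built sits. */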
2676
2677 /* Dict and set comprehensions and generator expressions work by creating a
2678 nested function to perform the actual iteration. This means that the
2679 iteration variables don't leak into the current scope.
2680 The defined function is called immediately following its definition, with the
2681 result of that call being the result of the expression.
2682 The set/dict comprehension versions return the populated container, while
2683 the generator expression version is flagged in symtable.c as a generator,
2684 so calling the function returns the generator object.
2685 This code *knows* that the loop cannot contain break, continue, or return,
2686 so it cheats and skips the SETUP_LOOP/POP_BLOCK steps used in normal loops.
2687
2688 Possible cleanups:
2689 - iterate over the generator sequence instead of using recursion
2690 */
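/* For illustration (hand-sketched; hypothetical names): in the enclosing
scope a generator expression such as

    (x*x for x in seq)

compiles to roughly

    LOAD_CONST    <code object <genexpr>>
    MAKE_FUNCTION 0        (or MAKE_CLOSURE if it has free variables)
    LOAD_NAME     seq
    GET_ITER
    CALL_FUNCTION 1

i.e. only the outermost iterable is evaluated in the current scope; it is
passed to the nested code object as its single implicit argument (loaded
with LOAD_FAST 0 in compiler_comprehension_generator below). */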
2691
2692 static int
2693 compiler_comprehension_generator(struct compiler *c,
2694 asdl_seq *generators, int gen_index,
2695 expr_ty elt, expr_ty val, int type)
2696 {
2697 /* generate code for the iterator, then each of the ifs,
2698 and then the code that yields or adds the element */
2699
2700 comprehension_ty gen;
2701 basicblock *start, *anchor, *skip, *if_cleanup;
2702 int i, n;
2703
2704 start = compiler_new_block(c);
2705 skip = compiler_new_block(c);
2706 if_cleanup = compiler_new_block(c);
2707 anchor = compiler_new_block(c);
2708
2709 if (start == NULL || skip == NULL || if_cleanup == NULL ||
2710 anchor == NULL)
2711 return 0;
2712
2713 gen = (comprehension_ty)asdl_seq_GET(generators, gen_index);
2714
2715 if (gen_index == 0) {
2716 /* Receive outermost iter as an implicit argument */
2717 c->u->u_argcount = 1;
2718 ADDOP_I(c, LOAD_FAST, 0);
2719 }
2720 else {
2721 /* Sub-iter - calculate on the fly */
2722 VISIT(c, expr, gen->iter);
2723 ADDOP(c, GET_ITER);
2724 }
2725 compiler_use_next_block(c, start);
2726 ADDOP_JREL(c, FOR_ITER, anchor);
2727 NEXT_BLOCK(c);
2728 VISIT(c, expr, gen->target);
2729
2730 /* XXX this needs to be cleaned up...a lot! */
2731 n = asdl_seq_LEN(gen->ifs);
2732 for (i = 0; i < n; i++) {
2733 expr_ty e = (expr_ty)asdl_seq_GET(gen->ifs, i);
2734 VISIT(c, expr, e);
2735 ADDOP_JABS(c, POP_JUMP_IF_FALSE, if_cleanup);
2736 NEXT_BLOCK(c);
2737 }
2738
2739 if (++gen_index < asdl_seq_LEN(generators))
2740 if (!compiler_comprehension_generator(c,
2741 generators, gen_index,
2742 elt, val, type))
2743 return 0;
2744
2745 /* only append after the last for generator */
2746 if (gen_index >= asdl_seq_LEN(generators)) {
2747 /* comprehension specific code */
2748 switch (type) {
2749 case COMP_GENEXP:
2750 VISIT(c, expr, elt);
2751 ADDOP(c, YIELD_VALUE);
2752 ADDOP(c, POP_TOP);
2753 break;
2754 case COMP_SETCOMP:
2755 VISIT(c, expr, elt);
2756 ADDOP_I(c, SET_ADD, gen_index + 1);
2757 break;
2758 case COMP_DICTCOMP:
2759 /* With 'd[k] = v', v is evaluated before k, so we do
2760 the same. */
2761 VISIT(c, expr, val);
2762 VISIT(c, expr, elt);
2763 ADDOP_I(c, MAP_ADD, gen_index + 1);
2764 break;
2765 default:
2766 return 0;
2767 }
2768
2769 compiler_use_next_block(c, skip);
2770 }
2771 compiler_use_next_block(c, if_cleanup);
2772 ADDOP_JABS(c, JUMP_ABSOLUTE, start);
2773 compiler_use_next_block(c, anchor);
2774
2775 return 1;
2776 }
2777
2778 static int
2779 compiler_comprehension(struct compiler *c, expr_ty e, int type, identifier name,
2780 asdl_seq *generators, expr_ty elt, expr_ty val)
2781 {
2782 PyCodeObject *co = NULL;
2783 expr_ty outermost_iter;
2784
2785 outermost_iter = ((comprehension_ty)
2786 asdl_seq_GET(generators, 0))->iter;
2787
2788 if (!compiler_enter_scope(c, name, (void *)e, e->lineno))
2789 goto error;
2790
2791 if (type != COMP_GENEXP) {
2792 int op;
2793 switch (type) {
2794 case COMP_SETCOMP:
2795 op = BUILD_SET;
2796 break;
2797 case COMP_DICTCOMP:
2798 op = BUILD_MAP;
2799 break;
2800 default:
2801 PyErr_Format(PyExc_SystemError,
2802 "unknown comprehension type %d", type);
2803 goto error_in_scope;
2804 }
2805
2806 ADDOP_I(c, op, 0);
2807 }
2808
2809 if (!compiler_comprehension_generator(c, generators, 0, elt,
2810 val, type))
2811 goto error_in_scope;
2812
2813 if (type != COMP_GENEXP) {
2814 ADDOP(c, RETURN_VALUE);
2815 }
2816
2817 co = assemble(c, 1);
2818 compiler_exit_scope(c);
2819 if (co == NULL)
2820 goto error;
2821
2822 if (!compiler_make_closure(c, co, 0))
2823 goto error;
2824 Py_DECREF(co);
2825
2826 VISIT(c, expr, outermost_iter);
2827 ADDOP(c, GET_ITER);
2828 ADDOP_I(c, CALL_FUNCTION, 1);
2829 return 1;
2830 error_in_scope:
2831 compiler_exit_scope(c);
2832 error:
2833 Py_XDECREF(co);
2834 return 0;
2835 }
2836
2837 static int
2838 compiler_genexp(struct compiler *c, expr_ty e)
2839 {
2840 static identifier name;
2841 if (!name) {
2842 name = PyString_FromString("<genexpr>");
2843 if (!name)
2844 return 0;
2845 }
2846 assert(e->kind == GeneratorExp_kind);
2847 return compiler_comprehension(c, e, COMP_GENEXP, name,
2848 e->v.GeneratorExp.generators,
2849 e->v.GeneratorExp.elt, NULL);
2850 }
2851
2852 static int
2853 compiler_setcomp(struct compiler *c, expr_ty e)
2854 {
2855 static identifier name;
2856 if (!name) {
2857 name = PyString_FromString("<setcomp>");
2858 if (!name)
2859 return 0;
2860 }
2861 assert(e->kind == SetComp_kind);
2862 return compiler_comprehension(c, e, COMP_SETCOMP, name,
2863 e->v.SetComp.generators,
2864 e->v.SetComp.elt, NULL);
2865 }
2866
2867 static int
2868 compiler_dictcomp(struct compiler *c, expr_ty e)
2869 {
2870 static identifier name;
2871 if (!name) {
2872 name = PyString_FromString("<dictcomp>");
2873 if (!name)
2874 return 0;
2875 }
2876 assert(e->kind == DictComp_kind);
2877 return compiler_comprehension(c, e, COMP_DICTCOMP, name,
2878 e->v.DictComp.generators,
2879 e->v.DictComp.key, e->v.DictComp.value);
2880 }
2881
2882 static int
2883 compiler_visit_keyword(struct compiler *c, keyword_ty k)
2884 {
2885 ADDOP_O(c, LOAD_CONST, k->arg, consts);
2886 VISIT(c, expr, k->value);
2887 return 1;
2888 }
2889
2890 /* Test whether expression is constant. For constants, report
2891 whether they are true or false.
2892
2893 Return values: 1 for true, 0 for false, -1 for non-constant.
2894 */
2895
2896 static int
2897 expr_constant(expr_ty e)
2898 {
2899 switch (e->kind) {
2900 case Num_kind:
2901 return PyObject_IsTrue(e->v.Num.n);
2902 case Str_kind:
2903 return PyObject_IsTrue(e->v.Str.s);
2904 case Name_kind:
2905 /* __debug__ is not assignable, so we can optimize
2906 * it away in if and while statements */
2907 if (strcmp(PyString_AS_STRING(e->v.Name.id),
2908 "__debug__") == 0)
2909 return ! Py_OptimizeFlag;
2910 /* fall through */
2911 default:
2912 return -1;
2913 }
2914 }
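/* For example, expr_constant() returns 1 for the test in "while 1:",
0 for "if 0:" or "if '':", !Py_OptimizeFlag for "if __debug__:", and -1
for anything whose truth value is not known at compile time; the if/while
compilers (earlier in this file) use this to omit dead branches and loop
tests. */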
2915
2916 /*
2917 Implements the with statement from PEP 343.
2918
2919 The semantics outlined in that PEP are as follows:
2920
2921 with EXPR as VAR:
2922 BLOCK
2923
2924 It is implemented roughly as:
2925
2926 context = EXPR
2927 exit = context.__exit__ # not calling it
2928 value = context.__enter__()
2929 try:
2930 VAR = value # if VAR present in the syntax
2931 BLOCK
2932 finally:
2933 if an exception was raised:
2934 exc = copy of (exception, instance, traceback)
2935 else:
2936 exc = (None, None, None)
2937 exit(*exc)
2938 */
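/* In terms of the opcodes emitted by the function below, this is roughly
(hand-sketched):

    <code for EXPR>
    SETUP_WITH    finally      (looks up __exit__, calls __enter__)
    STORE_NAME    VAR          (or POP_TOP when there is no "as VAR")
    <code for BLOCK>
    POP_BLOCK
    LOAD_CONST    None
  finally:
    WITH_CLEANUP
    END_FINALLY
*/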
2939 static int
2940 compiler_with(struct compiler *c, stmt_ty s)
2941 {
2942 basicblock *block, *finally;
2943
2944 assert(s->kind == With_kind);
2945
2946 block = compiler_new_block(c);
2947 finally = compiler_new_block(c);
2948 if (!block || !finally)
2949 return 0;
2950
2951 /* Evaluate EXPR */
2952 VISIT(c, expr, s->v.With.context_expr);
2953 ADDOP_JREL(c, SETUP_WITH, finally);
2954
2955 /* SETUP_WITH pushes a finally block. */
2956 compiler_use_next_block(c, block);
2957 /* Note that the block type pushed here is called SETUP_WITH in ceval.c,
2958 but it functions the same as a SETUP_FINALLY block except that
2959 exceptions are normalized. */
2960 if (!compiler_push_fblock(c, FINALLY_TRY, block)) {
2961 return 0;
2962 }
2963
2964 if (s->v.With.optional_vars) {
2965 VISIT(c, expr, s->v.With.optional_vars);
2966 }
2967 else {
2968 /* Discard result from context.__enter__() */
2969 ADDOP(c, POP_TOP);
2970 }
2971
2972 /* BLOCK code */
2973 VISIT_SEQ(c, stmt, s->v.With.body);
2974
2975 /* End of try block; start the finally block */
2976 ADDOP(c, POP_BLOCK);
2977 compiler_pop_fblock(c, FINALLY_TRY, block);
2978
2979 ADDOP_O(c, LOAD_CONST, Py_None, consts);
2980 compiler_use_next_block(c, finally);
2981 if (!compiler_push_fblock(c, FINALLY_END, finally))
2982 return 0;
2983
2984 /* Finally block starts; context.__exit__ is on the stack under
2985 the exception or return information. Just issue our magic
2986 opcode. */
2987 ADDOP(c, WITH_CLEANUP);
2988
2989 /* Finally block ends. */
2990 ADDOP(c, END_FINALLY);
2991 compiler_pop_fblock(c, FINALLY_END, finally);
2992 return 1;
2993 }
2994
2995 static int
2996 compiler_visit_expr(struct compiler *c, expr_ty e)
2997 {
2998 int i, n;
2999
3000 /* If expr e has a different line number than the last expr/stmt,
3001 set a new line number for the next instruction.
3002 */
3003 if (e->lineno > c->u->u_lineno) {
3004 c->u->u_lineno = e->lineno;
3005 c->u->u_lineno_set = false;
3006 }
3007 switch (e->kind) {
3008 case BoolOp_kind:
3009 return compiler_boolop(c, e);
3010 case BinOp_kind:
3011 VISIT(c, expr, e->v.BinOp.left);
3012 VISIT(c, expr, e->v.BinOp.right);
3013 ADDOP(c, binop(c, e->v.BinOp.op));
3014 break;
3015 case UnaryOp_kind:
3016 VISIT(c, expr, e->v.UnaryOp.operand);
3017 ADDOP(c, unaryop(e->v.UnaryOp.op));
3018 break;
3019 case Lambda_kind:
3020 return compiler_lambda(c, e);
3021 case IfExp_kind:
3022 return compiler_ifexp(c, e);
3023 case Dict_kind:
3024 n = asdl_seq_LEN(e->v.Dict.values);
3025 ADDOP_I(c, BUILD_MAP, (n>0xFFFF ? 0xFFFF : n));
3026 for (i = 0; i < n; i++) {
3027 VISIT(c, expr,
3028 (expr_ty)asdl_seq_GET(e->v.Dict.values, i));
3029 VISIT(c, expr,
3030 (expr_ty)asdl_seq_GET(e->v.Dict.keys, i));
3031 ADDOP(c, STORE_MAP);
3032 }
3033 break;
3034 case Set_kind:
3035 n = asdl_seq_LEN(e->v.Set.elts);
3036 VISIT_SEQ(c, expr, e->v.Set.elts);
3037 ADDOP_I(c, BUILD_SET, n);
3038 break;
3039 case ListComp_kind:
3040 return compiler_listcomp(c, e);
3041 case SetComp_kind:
3042 return compiler_setcomp(c, e);
3043 case DictComp_kind:
3044 return compiler_dictcomp(c, e);
3045 case GeneratorExp_kind:
3046 return compiler_genexp(c, e);
3047 case Yield_kind:
3048 if (c->u->u_ste->ste_type != FunctionBlock)
3049 return compiler_error(c, "'yield' outside function");
3050 if (e->v.Yield.value) {
3051 VISIT(c, expr, e->v.Yield.value);
3052 }
3053 else {
3054 ADDOP_O(c, LOAD_CONST, Py_None, consts);
3055 }
3056 ADDOP(c, YIELD_VALUE);
3057 break;
3058 case Compare_kind:
3059 return compiler_compare(c, e);
3060 case Call_kind:
3061 return compiler_call(c, e);
3062 case Repr_kind:
3063 VISIT(c, expr, e->v.Repr.value);
3064 ADDOP(c, UNARY_CONVERT);
3065 break;
3066 case Num_kind:
3067 ADDOP_O(c, LOAD_CONST, e->v.Num.n, consts);
3068 break;
3069 case Str_kind:
3070 ADDOP_O(c, LOAD_CONST, e->v.Str.s, consts);
3071 break;
3072 /* The following exprs can be assignment targets. */
3073 case Attribute_kind:
3074 if (e->v.Attribute.ctx != AugStore)
3075 VISIT(c, expr, e->v.Attribute.value);
3076 switch (e->v.Attribute.ctx) {
3077 case AugLoad:
3078 ADDOP(c, DUP_TOP);
3079 /* Fall through to load */
3080 case Load:
3081 ADDOP_NAME(c, LOAD_ATTR, e->v.Attribute.attr, names);
3082 break;
3083 case AugStore:
3084 ADDOP(c, ROT_TWO);
3085 /* Fall through to save */
3086 case Store:
3087 ADDOP_NAME(c, STORE_ATTR, e->v.Attribute.attr, names);
3088 break;
3089 case Del:
3090 ADDOP_NAME(c, DELETE_ATTR, e->v.Attribute.attr, names);
3091 break;
3092 case Param:
3093 default:
3094 PyErr_SetString(PyExc_SystemError,
3095 "param invalid in attribute expression");
3096 return 0;
3097 }
3098 break;
3099 case Subscript_kind:
3100 switch (e->v.Subscript.ctx) {
3101 case AugLoad:
3102 VISIT(c, expr, e->v.Subscript.value);
3103 VISIT_SLICE(c, e->v.Subscript.slice, AugLoad);
3104 break;
3105 case Load:
3106 VISIT(c, expr, e->v.Subscript.value);
3107 VISIT_SLICE(c, e->v.Subscript.slice, Load);
3108 break;
3109 case AugStore:
3110 VISIT_SLICE(c, e->v.Subscript.slice, AugStore);
3111 break;
3112 case Store:
3113 VISIT(c, expr, e->v.Subscript.value);
3114 VISIT_SLICE(c, e->v.Subscript.slice, Store);
3115 break;
3116 case Del:
3117 VISIT(c, expr, e->v.Subscript.value);
3118 VISIT_SLICE(c, e->v.Subscript.slice, Del);
3119 break;
3120 case Param:
3121 default:
3122 PyErr_SetString(PyExc_SystemError,
3123 "param invalid in subscript expression");
3124 return 0;
3125 }
3126 break;
3127 case Name_kind:
3128 return compiler_nameop(c, e->v.Name.id, e->v.Name.ctx);
3129 /* child nodes of List and Tuple will have expr_context set */
3130 case List_kind:
3131 return compiler_list(c, e);
3132 case Tuple_kind:
3133 return compiler_tuple(c, e);
3134 }
3135 return 1;
3136 }
3137
3138 static int
3139 compiler_augassign(struct compiler *c, stmt_ty s)
3140 {
3141 expr_ty e = s->v.AugAssign.target;
3142 expr_ty auge;
3143
3144 assert(s->kind == AugAssign_kind);
3145
3146 switch (e->kind) {
3147 case Attribute_kind:
3148 auge = Attribute(e->v.Attribute.value, e->v.Attribute.attr,
3149 AugLoad, e->lineno, e->col_offset, c->c_arena);
3150 if (auge == NULL)
3151 return 0;
3152 VISIT(c, expr, auge);
3153 VISIT(c, expr, s->v.AugAssign.value);
3154 ADDOP(c, inplace_binop(c, s->v.AugAssign.op));
3155 auge->v.Attribute.ctx = AugStore;
3156 VISIT(c, expr, auge);
3157 break;
3158 case Subscript_kind:
3159 auge = Subscript(e->v.Subscript.value, e->v.Subscript.slice,
3160 AugLoad, e->lineno, e->col_offset, c->c_arena);
3161 if (auge == NULL)
3162 return 0;
3163 VISIT(c, expr, auge);
3164 VISIT(c, expr, s->v.AugAssign.value);
3165 ADDOP(c, inplace_binop(c, s->v.AugAssign.op));
3166 auge->v.Subscript.ctx = AugStore;
3167 VISIT(c, expr, auge);
3168 break;
3169 case Name_kind:
3170 if (!compiler_nameop(c, e->v.Name.id, Load))
3171 return 0;
3172 VISIT(c, expr, s->v.AugAssign.value);
3173 ADDOP(c, inplace_binop(c, s->v.AugAssign.op));
3174 return compiler_nameop(c, e->v.Name.id, Store);
3175 default:
3176 PyErr_Format(PyExc_SystemError,
3177 "invalid node type (%d) for augmented assignment",
3178 e->kind);
3179 return 0;
3180 }
3181 return 1;
3182 }
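/* Rough illustration of the AugLoad/AugStore dance (hand-sketched;
hypothetical names):

    x.a += 1

    LOAD_NAME    x
    DUP_TOP                    (AugLoad duplicates the object ...)
    LOAD_ATTR    a
    LOAD_CONST   1
    INPLACE_ADD
    ROT_TWO                    (... so AugStore can reuse it here)
    STORE_ATTR   a

A bare name target skips this and simply does a load, the in-place op,
and a store, as in the Name_kind case above. */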
3183
3184 static int
3185 compiler_push_fblock(struct compiler *c, enum fblocktype t, basicblock *b)
3186 {
3187 struct fblockinfo *f;
3188 if (c->u->u_nfblocks >= CO_MAXBLOCKS) {
3189 PyErr_SetString(PyExc_SystemError,
3190 "too many statically nested blocks");
3191 return 0;
3192 }
3193 f = &c->u->u_fblock[c->u->u_nfblocks++];
3194 f->fb_type = t;
3195 f->fb_block = b;
3196 return 1;
3197 }
3198
3199 static void
3200 compiler_pop_fblock(struct compiler *c, enum fblocktype t, basicblock *b)
3201 {
3202 struct compiler_unit *u = c->u;
3203 assert(u->u_nfblocks > 0);
3204 u->u_nfblocks--;
3205 assert(u->u_fblock[u->u_nfblocks].fb_type == t);
3206 assert(u->u_fblock[u->u_nfblocks].fb_block == b);
3207 }
3208
3209 static int
3210 compiler_in_loop(struct compiler *c) {
3211 int i;
3212 struct compiler_unit *u = c->u;
3213 for (i = 0; i < u->u_nfblocks; ++i) {
3214 if (u->u_fblock[i].fb_type == LOOP)
3215 return 1;
3216 }
3217 return 0;
3218 }
3219 /* Raises a SyntaxError and returns 0.
3220 If something goes wrong, a different exception may be raised.
3221 */
3222
3223 static int
3224 compiler_error(struct compiler *c, const char *errstr)
3225 {
3226 PyObject *loc;
3227 PyObject *u = NULL, *v = NULL;
3228
3229 loc = PyErr_ProgramText(c->c_filename, c->u->u_lineno);
3230 if (!loc) {
3231 Py_INCREF(Py_None);
3232 loc = Py_None;
3233 }
3234 u = Py_BuildValue("(ziOO)", c->c_filename, c->u->u_lineno,
3235 Py_None, loc);
3236 if (!u)
3237 goto exit;
3238 v = Py_BuildValue("(zO)", errstr, u);
3239 if (!v)
3240 goto exit;
3241 PyErr_SetObject(PyExc_SyntaxError, v);
3242 exit:
3243 Py_DECREF(loc);
3244 Py_XDECREF(u);
3245 Py_XDECREF(v);
3246 return 0;
3247 }
3248
3249 static int
3250 compiler_handle_subscr(struct compiler *c, const char *kind,
3251 expr_context_ty ctx)
3252 {
3253 int op = 0;
3254
3255 /* XXX this code is duplicated */
3256 switch (ctx) {
3257 case AugLoad: /* fall through to Load */
3258 case Load: op = BINARY_SUBSCR; break;
3259 case AugStore:/* fall through to Store */
3260 case Store: op = STORE_SUBSCR; break;
3261 case Del: op = DELETE_SUBSCR; break;
3262 case Param:
3263 PyErr_Format(PyExc_SystemError,
3264 "invalid %s kind %d in subscript\n",
3265 kind, ctx);
3266 return 0;
3267 }
3268 if (ctx == AugLoad) {
3269 ADDOP_I(c, DUP_TOPX, 2);
3270 }
3271 else if (ctx == AugStore) {
3272 ADDOP(c, ROT_THREE);
3273 }
3274 ADDOP(c, op);
3275 return 1;
3276 }
3277
3278 static int
3279 compiler_slice(struct compiler *c, slice_ty s, expr_context_ty ctx)
3280 {
3281 int n = 2;
3282 assert(s->kind == Slice_kind);
3283
3284 /* only handles the cases where BUILD_SLICE is emitted */
3285 if (s->v.Slice.lower) {
3286 VISIT(c, expr, s->v.Slice.lower);
3287 }
3288 else {
3289 ADDOP_O(c, LOAD_CONST, Py_None, consts);
3290 }
3291
3292 if (s->v.Slice.upper) {
3293 VISIT(c, expr, s->v.Slice.upper);
3294 }
3295 else {
3296 ADDOP_O(c, LOAD_CONST, Py_None, consts);
3297 }
3298
3299 if (s->v.Slice.step) {
3300 n++;
3301 VISIT(c, expr, s->v.Slice.step);
3302 }
3303 ADDOP_I(c, BUILD_SLICE, n);
3304 return 1;
3305 }
3306
3307 static int
3308 compiler_simple_slice(struct compiler *c, slice_ty s, expr_context_ty ctx)
3309 {
3310 int op = 0, slice_offset = 0, stack_count = 0;
3311
3312 assert(s->v.Slice.step == NULL);
3313 if (s->v.Slice.lower) {
3314 slice_offset++;
3315 stack_count++;
3316 if (ctx != AugStore)
3317 VISIT(c, expr, s->v.Slice.lower);
3318 }
3319 if (s->v.Slice.upper) {
3320 slice_offset += 2;
3321 stack_count++;
3322 if (ctx != AugStore)
3323 VISIT(c, expr, s->v.Slice.upper);
3324 }
3325
3326 if (ctx == AugLoad) {
3327 switch (stack_count) {
3328 case 0: ADDOP(c, DUP_TOP); break;
3329 case 1: ADDOP_I(c, DUP_TOPX, 2); break;
3330 case 2: ADDOP_I(c, DUP_TOPX, 3); break;
3331 }
3332 }
3333 else if (ctx == AugStore) {
3334 switch (stack_count) {
3335 case 0: ADDOP(c, ROT_TWO); break;
3336 case 1: ADDOP(c, ROT_THREE); break;
3337 case 2: ADDOP(c, ROT_FOUR); break;
3338 }
3339 }
3340
3341 switch (ctx) {
3342 case AugLoad: /* fall through to Load */
3343 case Load: op = SLICE; break;
3344 case AugStore:/* fall through to Store */
3345 case Store: op = STORE_SLICE; break;
3346 case Del: op = DELETE_SLICE; break;
3347 case Param:
3348 default:
3349 PyErr_SetString(PyExc_SystemError,
3350 "param invalid in simple slice");
3351 return 0;
3352 }
3353
3354 ADDOP(c, op + slice_offset);
3355 return 1;
3356 }
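/* The slice_offset trick above selects among the four short-form slice
opcodes: SLICE+0 (no bounds), +1 (lower only), +2 (upper only), +3 (both),
and likewise for STORE_SLICE/DELETE_SLICE. For example (hand-sketched;
hypothetical names) "x[a:b]" compiles to

    LOAD_NAME x
    LOAD_NAME a
    LOAD_NAME b
    SLICE+3

while a slice with a step goes through compiler_slice() and BUILD_SLICE
instead. */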
3357
3358 static int
3359 compiler_visit_nested_slice(struct compiler *c, slice_ty s,
3360 expr_context_ty ctx)
3361 {
3362 switch (s->kind) {
3363 case Ellipsis_kind:
3364 ADDOP_O(c, LOAD_CONST, Py_Ellipsis, consts);
3365 break;
3366 case Slice_kind:
3367 return compiler_slice(c, s, ctx);
3368 case Index_kind:
3369 VISIT(c, expr, s->v.Index.value);
3370 break;
3371 case ExtSlice_kind:
3372 default:
3373 PyErr_SetString(PyExc_SystemError,
3374 "extended slice invalid in nested slice");
3375 return 0;
3376 }
3377 return 1;
3378 }
3379
3380 static int
3381 compiler_visit_slice(struct compiler *c, slice_ty s, expr_context_ty ctx)
3382 {
3383 char * kindname = NULL;
3384 switch (s->kind) {
3385 case Index_kind:
3386 kindname = "index";
3387 if (ctx != AugStore) {
3388 VISIT(c, expr, s->v.Index.value);
3389 }
3390 break;
3391 case Ellipsis_kind:
3392 kindname = "ellipsis";
3393 if (ctx != AugStore) {
3394 ADDOP_O(c, LOAD_CONST, Py_Ellipsis, consts);
3395 }
3396 break;
3397 case Slice_kind:
3398 kindname = "slice";
3399 if (!s->v.Slice.step)
3400 return compiler_simple_slice(c, s, ctx);
3401 if (ctx != AugStore) {
3402 if (!compiler_slice(c, s, ctx))
3403 return 0;
3404 }
3405 break;
3406 case ExtSlice_kind:
3407 kindname = "extended slice";
3408 if (ctx != AugStore) {
3409 int i, n = asdl_seq_LEN(s->v.ExtSlice.dims);
3410 for (i = 0; i < n; i++) {
3411 slice_ty sub = (slice_ty)asdl_seq_GET(
3412 s->v.ExtSlice.dims, i);
3413 if (!compiler_visit_nested_slice(c, sub, ctx))
3414 return 0;
3415 }
3416 ADDOP_I(c, BUILD_TUPLE, n);
3417 }
3418 break;
3419 default:
3420 PyErr_Format(PyExc_SystemError,
3421 "invalid subscript kind %d", s->kind);
3422 return 0;
3423 }
3424 return compiler_handle_subscr(c, kindname, ctx);
3425 }
3426
3427
3428 /* End of the compiler section, beginning of the assembler section */
3429
3430 /* Do a depth-first search of the basic block graph, starting with block.
3431 a->a_postorder records the blocks in post-order.
3432
3433 XXX must handle implicit jumps from one block to next
3434 */
3435
3436 struct assembler {
3437 PyObject *a_bytecode; /* string containing bytecode */
3438 int a_offset; /* offset into bytecode */
3439 int a_nblocks; /* number of reachable blocks */
3440 basicblock **a_postorder; /* list of blocks in dfs postorder */
3441 PyObject *a_lnotab; /* string containing lnotab */
3442 int a_lnotab_off; /* offset into lnotab */
3443 int a_lineno; /* last lineno of emitted instruction */
3444 int a_lineno_off; /* bytecode offset of last lineno */
3445 };
3446
3447 static void
3448 dfs(struct compiler *c, basicblock *b, struct assembler *a)
3449 {
3450 int i;
3451 struct instr *instr = NULL;
3452
3453 if (b->b_seen)
3454 return;
3455 b->b_seen = 1;
3456 if (b->b_next != NULL)
3457 dfs(c, b->b_next, a);
3458 for (i = 0; i < b->b_iused; i++) {
3459 instr = &b->b_instr[i];
3460 if (instr->i_jrel || instr->i_jabs)
3461 dfs(c, instr->i_target, a);
3462 }
3463 a->a_postorder[a->a_nblocks++] = b;
3464 }
3465
3466 static int
3467 stackdepth_walk(struct compiler *c, basicblock *b, int depth, int maxdepth)
3468 {
3469 int i, target_depth;
3470 struct instr *instr;
3471 if (b->b_seen || b->b_startdepth >= depth)
3472 return maxdepth;
3473 b->b_seen = 1;
3474 b->b_startdepth = depth;
3475 for (i = 0; i < b->b_iused; i++) {
3476 instr = &b->b_instr[i];
3477 depth += opcode_stack_effect(instr->i_opcode, instr->i_oparg);
3478 if (depth > maxdepth)
3479 maxdepth = depth;
3480 assert(depth >= 0); /* invalid code or bug in stackdepth() */
3481 if (instr->i_jrel || instr->i_jabs) {
3482 target_depth = depth;
3483 if (instr->i_opcode == FOR_ITER) {
3484 target_depth = depth-2;
3485 }
3486 else if (instr->i_opcode == SETUP_FINALLY ||
3487 instr->i_opcode == SETUP_EXCEPT) {
3488 target_depth = depth+3;
3489 if (target_depth > maxdepth)
3490 maxdepth = target_depth;
3491 }
3492 else if (instr->i_opcode == JUMP_IF_TRUE_OR_POP ||
3493 instr->i_opcode == JUMP_IF_FALSE_OR_POP)
3494 depth = depth - 1;
3495 maxdepth = stackdepth_walk(c, instr->i_target,
3496 target_depth, maxdepth);
3497 if (instr->i_opcode == JUMP_ABSOLUTE ||
3498 instr->i_opcode == JUMP_FORWARD) {
3499 goto out; /* remaining code is dead */
3500 }
3501 }
3502 }
3503 if (b->b_next)
3504 maxdepth = stackdepth_walk(c, b->b_next, depth, maxdepth);
3505 out:
3506 b->b_seen = 0;
3507 return maxdepth;
3508 }
3509
3510 /* Find the flow path that needs the largest stack. We assume that
3511 * cycles in the flow graph have no net effect on the stack depth.
3512 */
3513 static int
3514 stackdepth(struct compiler *c)
3515 {
3516 basicblock *b, *entryblock;
3517 entryblock = NULL;
3518 for (b = c->u->u_blocks; b != NULL; b = b->b_list) {
3519 b->b_seen = 0;
3520 b->b_startdepth = INT_MIN;
3521 entryblock = b;
3522 }
3523 if (!entryblock)
3524 return 0;
3525 return stackdepth_walk(c, entryblock, 0, 0);
3526 }
3527
3528 static int
3529 assemble_init(struct assembler *a, int nblocks, int firstlineno)
3530 {
3531 memset(a, 0, sizeof(struct assembler));
3532 a->a_lineno = firstlineno;
3533 a->a_bytecode = PyString_FromStringAndSize(NULL, DEFAULT_CODE_SIZE);
3534 if (!a->a_bytecode)
3535 return 0;
3536 a->a_lnotab = PyString_FromStringAndSize(NULL, DEFAULT_LNOTAB_SIZE);
3537 if (!a->a_lnotab)
3538 return 0;
3539 if (nblocks > PY_SIZE_MAX / sizeof(basicblock *)) {
3540 PyErr_NoMemory();
3541 return 0;
3542 }
3543 a->a_postorder = (basicblock **)PyObject_Malloc(
3544 sizeof(basicblock *) * nblocks);
3545 if (!a->a_postorder) {
3546 PyErr_NoMemory();
3547 return 0;
3548 }
3549 return 1;
3550 }
3551
3552 static void
3553 assemble_free(struct assembler *a)
3554 {
3555 Py_XDECREF(a->a_bytecode);
3556 Py_XDECREF(a->a_lnotab);
3557 if (a->a_postorder)
3558 PyObject_Free(a->a_postorder);
3559 }
3560
3561 /* Return the size of an instruction in bytes; blocksize() below sums these to get the size of a basic block. */
3562
3563 static int
3564 instrsize(struct instr *instr)
3565 {
3566 if (!instr->i_hasarg)
3567 return 1; /* 1 byte for the opcode */
3568 if (instr->i_oparg > 0xffff)
3569 return 6; /* 1 (opcode) + 1 (EXTENDED_ARG opcode) + 2 (oparg) + 2 (oparg extended) */
3570 return 3; /* 1 (opcode) + 2 (oparg) */
3571 }
3572
3573 static int
3574 blocksize(basicblock *b)
3575 {
3576 int i;
3577 int size = 0;
3578
3579 for (i = 0; i < b->b_iused; i++)
3580 size += instrsize(&b->b_instr[i]);
3581 return size;
3582 }
3583
3584 /* Appends a pair to the end of the line number table, a_lnotab, representing
3585 the instruction's bytecode offset and line number. See
3586 Objects/lnotab_notes.txt for the description of the line number table. */
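/* Worked example (illustrative): if instructions start at bytecode offsets
0, 6 and 50 for source lines 1, 2 and 7, the (offset delta, line delta)
pairs are (6, 1) and (44, 5), so a_lnotab holds the bytes 06 01 2C 05.
Deltas larger than 255 are split into multiple pairs, e.g. a bytecode
delta of 300 becomes (255, 0) followed by (45, line delta), which is what
the two overflow loops below take care of. */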
3587
3588 static int
3589 assemble_lnotab(struct assembler *a, struct instr *i)
3590 {
3591 int d_bytecode, d_lineno;
3592 int len;
3593 unsigned char *lnotab;
3594
3595 d_bytecode = a->a_offset - a->a_lineno_off;
3596 d_lineno = i->i_lineno - a->a_lineno;
3597
3598 assert(d_bytecode >= 0);
3599 assert(d_lineno >= 0);
3600
3601 if(d_bytecode == 0 && d_lineno == 0)
3602 return 1;
3603
3604 if (d_bytecode > 255) {
3605 int j, nbytes, ncodes = d_bytecode / 255;
3606 nbytes = a->a_lnotab_off + 2 * ncodes;
3607 len = PyString_GET_SIZE(a->a_lnotab);
3608 if (nbytes >= len) {
3609 if ((len <= INT_MAX / 2) && (len * 2 < nbytes))
3610 len = nbytes;
3611 else if (len <= INT_MAX / 2)
3612 len *= 2;
3613 else {
3614 PyErr_NoMemory();
3615 return 0;
3616 }
3617 if (_PyString_Resize(&a->a_lnotab, len) < 0)
3618 return 0;
3619 }
3620 lnotab = (unsigned char *)
3621 PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off;
3622 for (j = 0; j < ncodes; j++) {
3623 *lnotab++ = 255;
3624 *lnotab++ = 0;
3625 }
3626 d_bytecode -= ncodes * 255;
3627 a->a_lnotab_off += ncodes * 2;
3628 }
3629 assert(d_bytecode <= 255);
3630 if (d_lineno > 255) {
3631 int j, nbytes, ncodes = d_lineno / 255;
3632 nbytes = a->a_lnotab_off + 2 * ncodes;
3633 len = PyString_GET_SIZE(a->a_lnotab);
3634 if (nbytes >= len) {
3635 if ((len <= INT_MAX / 2) && len * 2 < nbytes)
3636 len = nbytes;
3637 else if (len <= INT_MAX / 2)
3638 len *= 2;
3639 else {
3640 PyErr_NoMemory();
3641 return 0;
3642 }
3643 if (_PyString_Resize(&a->a_lnotab, len) < 0)
3644 return 0;
3645 }
3646 lnotab = (unsigned char *)
3647 PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off;
3648 *lnotab++ = d_bytecode;
3649 *lnotab++ = 255;
3650 d_bytecode = 0;
3651 for (j = 1; j < ncodes; j++) {
3652 *lnotab++ = 0;
3653 *lnotab++ = 255;
3654 }
3655 d_lineno -= ncodes * 255;
3656 a->a_lnotab_off += ncodes * 2;
3657 }
3658
3659 len = PyString_GET_SIZE(a->a_lnotab);
3660 if (a->a_lnotab_off + 2 >= len) {
3661 if (_PyString_Resize(&a->a_lnotab, len * 2) < 0)
3662 return 0;
3663 }
3664 lnotab = (unsigned char *)
3665 PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off;
3666
3667 a->a_lnotab_off += 2;
3668 if (d_bytecode) {
3669 *lnotab++ = d_bytecode;
3670 *lnotab++ = d_lineno;
3671 }
3672 else { /* First line of a block; def stmt, etc. */
3673 *lnotab++ = 0;
3674 *lnotab++ = d_lineno;
3675 }
3676 a->a_lineno = i->i_lineno;
3677 a->a_lineno_off = a->a_offset;
3678 return 1;
3679 }
3680
3681 /* assemble_emit()
3682 Extend the bytecode with a new instruction.
3683 Update lnotab if necessary.
3684 */
3685
3686 static int
3687 assemble_emit(struct assembler *a, struct instr *i)
3688 {
3689 int size, arg = 0, ext = 0;
3690 Py_ssize_t len = PyString_GET_SIZE(a->a_bytecode);
3691 char *code;
3692
3693 size = instrsize(i);
3694 if (i->i_hasarg) {
3695 arg = i->i_oparg;
3696 ext = arg >> 16;
3697 }
3698 if (i->i_lineno && !assemble_lnotab(a, i))
3699 return 0;
3700 if (a->a_offset + size >= len) {
3701 if (len > PY_SSIZE_T_MAX / 2)
3702 return 0;
3703 if (_PyString_Resize(&a->a_bytecode, len * 2) < 0)
3704 return 0;
3705 }
3706 code = PyString_AS_STRING(a->a_bytecode) + a->a_offset;
3707 a->a_offset += size;
3708 if (size == 6) {
3709 assert(i->i_hasarg);
3710 *code++ = (char)EXTENDED_ARG;
3711 *code++ = ext & 0xff;
3712 *code++ = ext >> 8;
3713 arg &= 0xffff;
3714 }
3715 *code++ = i->i_opcode;
3716 if (i->i_hasarg) {
3717 assert(size == 3 || size == 6);
3718 *code++ = arg & 0xff;
3719 *code++ = arg >> 8;
3720 }
3721 return 1;
3722 }
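/* For example (illustrative), a jump whose computed oparg is 0x12345 is
emitted as the six bytes

    EXTENDED_ARG 0x01 0x00   <opcode> 0x45 0x23

i.e. the high 16 bits go out first via EXTENDED_ARG and both argument
halves are stored little-endian, matching the size of 6 reported by
instrsize(). */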
3723
3724 static void
3725 assemble_jump_offsets(struct assembler *a, struct compiler *c)
3726 {
3727 basicblock *b;
3728 int bsize, totsize, extended_arg_count = 0, last_extended_arg_count;
3729 int i;
3730
3731 /* Compute the size of each block and fixup jump args.
3732 Replace block pointer with position in bytecode. */
3733 do {
3734 totsize = 0;
3735 for (i = a->a_nblocks - 1; i >= 0; i--) {
3736 b = a->a_postorder[i];
3737 bsize = blocksize(b);
3738 b->b_offset = totsize;
3739 totsize += bsize;
3740 }
3741 last_extended_arg_count = extended_arg_count;
3742 extended_arg_count = 0;
3743 for (b = c->u->u_blocks; b != NULL; b = b->b_list) {
3744 bsize = b->b_offset;
3745 for (i = 0; i < b->b_iused; i++) {
3746 struct instr *instr = &b->b_instr[i];
3747 /* Relative jumps are computed relative to
3748 the instruction pointer after fetching
3749 the jump instruction.
3750 */
3751 bsize += instrsize(instr);
3752 if (instr->i_jabs)
3753 instr->i_oparg = instr->i_target->b_offset;
3754 else if (instr->i_jrel) {
3755 int delta = instr->i_target->b_offset - bsize;
3756 instr->i_oparg = delta;
3757 }
3758 else
3759 continue;
3760 if (instr->i_oparg > 0xffff)
3761 extended_arg_count++;
3762 }
3763 }
3764
3765 /* XXX: This is an awful hack that could hurt performance, but
3766 on the bright side it should work until we come up
3767 with a better solution.
3768
3769 The issue is that in the first loop blocksize() is called
3770 which calls instrsize() which requires i_oparg be set
3771 appropriately. There is a bootstrap problem because
3772 i_oparg is calculated in the second loop above.
3773
3774 So we loop until we stop seeing new EXTENDED_ARGs.
3775 The only EXTENDED_ARGs that could be popping up are
3776 ones in jump instructions. So this should converge
3777 fairly quickly.
3778 */
3779 } while (last_extended_arg_count != extended_arg_count);
3780 }
3781
3782 static PyObject *
3783 dict_keys_inorder(PyObject *dict, int offset)
3784 {
3785 PyObject *tuple, *k, *v;
3786 Py_ssize_t i, pos = 0, size = PyDict_Size(dict);
3787
3788 tuple = PyTuple_New(size);
3789 if (tuple == NULL)
3790 return NULL;
3791 while (PyDict_Next(dict, &pos, &k, &v)) {
3792 i = PyInt_AS_LONG(v);
3793 /* The keys of the dictionary are tuples. (see compiler_add_o)
3794 The object we want is always first, though. */
3795 k = PyTuple_GET_ITEM(k, 0);
3796 Py_INCREF(k);
3797 assert((i - offset) < size);
3798 assert((i - offset) >= 0);
3799 PyTuple_SET_ITEM(tuple, i - offset, k);
3800 }
3801 return tuple;
3802 }
3803
3804 static int
3805 compute_code_flags(struct compiler *c)
3806 {
3807 PySTEntryObject *ste = c->u->u_ste;
3808 int flags = 0, n;
3809 if (ste->ste_type != ModuleBlock)
3810 flags |= CO_NEWLOCALS;
3811 if (ste->ste_type == FunctionBlock) {
3812 if (!ste->ste_unoptimized)
3813 flags |= CO_OPTIMIZED;
3814 if (ste->ste_nested)
3815 flags |= CO_NESTED;
3816 if (ste->ste_generator)
3817 flags |= CO_GENERATOR;
3818 if (ste->ste_varargs)
3819 flags |= CO_VARARGS;
3820 if (ste->ste_varkeywords)
3821 flags |= CO_VARKEYWORDS;
3822 }
3823
3824 /* (Only) inherit the compiler flags in PyCF_MASK */
3825 flags |= (c->c_flags->cf_flags & PyCF_MASK);
3826
3827 n = PyDict_Size(c->u->u_freevars);
3828 if (n < 0)
3829 return -1;
3830 if (n == 0) {
3831 n = PyDict_Size(c->u->u_cellvars);
3832 if (n < 0)
3833 return -1;
3834 if (n == 0) {
3835 flags |= CO_NOFREE;
3836 }
3837 }
3838
3839 return flags;
3840 }
3841
3842 static PyCodeObject *
3843 makecode(struct compiler *c, struct assembler *a)
3844 {
3845 PyObject *tmp;
3846 PyCodeObject *co = NULL;
3847 PyObject *consts = NULL;
3848 PyObject *names = NULL;
3849 PyObject *varnames = NULL;
3850 PyObject *filename = NULL;
3851 PyObject *name = NULL;
3852 PyObject *freevars = NULL;
3853 PyObject *cellvars = NULL;
3854 PyObject *bytecode = NULL;
3855 int nlocals, flags;
3856
3857 tmp = dict_keys_inorder(c->u->u_consts, 0);
3858 if (!tmp)
3859 goto error;
3860 consts = PySequence_List(tmp); /* optimize_code requires a list */
3861 Py_DECREF(tmp);
3862
3863 names = dict_keys_inorder(c->u->u_names, 0);
3864 varnames = dict_keys_inorder(c->u->u_varnames, 0);
3865 if (!consts || !names || !varnames)
3866 goto error;
3867
3868 cellvars = dict_keys_inorder(c->u->u_cellvars, 0);
3869 if (!cellvars)
3870 goto error;
3871 freevars = dict_keys_inorder(c->u->u_freevars, PyTuple_Size(cellvars));
3872 if (!freevars)
3873 goto error;
3874 filename = PyString_FromString(c->c_filename);
3875 if (!filename)
3876 goto error;
3877
3878 nlocals = PyDict_Size(c->u->u_varnames);
3879 flags = compute_code_flags(c);
3880 if (flags < 0)
3881 goto error;
3882
3883 bytecode = PyCode_Optimize(a->a_bytecode, consts, names, a->a_lnotab);
3884 if (!bytecode)
3885 goto error;
3886
3887 tmp = PyList_AsTuple(consts); /* PyCode_New requires a tuple */
3888 if (!tmp)
3889 goto error;
3890 Py_DECREF(consts);
3891 consts = tmp;
3892
3893 co = PyCode_New(c->u->u_argcount, nlocals, stackdepth(c), flags,
3894 bytecode, consts, names, varnames,
3895 freevars, cellvars,
3896 filename, c->u->u_name,
3897 c->u->u_firstlineno,
3898 a->a_lnotab);
3899 error:
3900 Py_XDECREF(consts);
3901 Py_XDECREF(names);
3902 Py_XDECREF(varnames);
3903 Py_XDECREF(filename);
3904 Py_XDECREF(name);
3905 Py_XDECREF(freevars);
3906 Py_XDECREF(cellvars);
3907 Py_XDECREF(bytecode);
3908 return co;
3909 }
3910
3911
3912 /* For debugging purposes only */
3913 #if 0
3914 static void
3915 dump_instr(const struct instr *i)
3916 {
3917 const char *jrel = i->i_jrel ? "jrel " : "";
3918 const char *jabs = i->i_jabs ? "jabs " : "";
3919 char arg[128];
3920
3921 *arg = '\0';
3922 if (i->i_hasarg)
3923 sprintf(arg, "arg: %d ", i->i_oparg);
3924
3925 fprintf(stderr, "line: %d, opcode: %d %s%s%s\n",
3926 i->i_lineno, i->i_opcode, arg, jabs, jrel);
3927 }
3928
3929 static void
3930 dump_basicblock(const basicblock *b)
3931 {
3932 const char *seen = b->b_seen ? "seen " : "";
3933 const char *b_return = b->b_return ? "return " : "";
3934 fprintf(stderr, "used: %d, depth: %d, offset: %d %s%s\n",
3935 b->b_iused, b->b_startdepth, b->b_offset, seen, b_return);
3936 if (b->b_instr) {
3937 int i;
3938 for (i = 0; i < b->b_iused; i++) {
3939 fprintf(stderr, " [%02d] ", i);
3940 dump_instr(b->b_instr + i);
3941 }
3942 }
3943 }
3944 #endif
3945
3946 static PyCodeObject *
3947 assemble(struct compiler *c, int addNone)
3948 {
3949 basicblock *b, *entryblock;
3950 struct assembler a;
3951 int i, j, nblocks;
3952 PyCodeObject *co = NULL;
3953
3954 /* Make sure every block that falls off the end returns None.
3955 XXX NEXT_BLOCK() isn't quite right, because if the last
3956 block ends with a jump or return, b_next shouldn't be set.
3957 */
3958 if (!c->u->u_curblock->b_return) {
3959 NEXT_BLOCK(c);
3960 if (addNone)
3961 ADDOP_O(c, LOAD_CONST, Py_None, consts);
3962 ADDOP(c, RETURN_VALUE);
3963 }
3964
3965 nblocks = 0;
3966 entryblock = NULL;
3967 for (b = c->u->u_blocks; b != NULL; b = b->b_list) {
3968 nblocks++;
3969 entryblock = b;
3970 }
3971
3972 /* Set firstlineno if it wasn't explicitly set. */
3973 if (!c->u->u_firstlineno) {
3974 if (entryblock && entryblock->b_instr)
3975 c->u->u_firstlineno = entryblock->b_instr->i_lineno;
3976 else
3977 c->u->u_firstlineno = 1;
3978 }
3979 if (!assemble_init(&a, nblocks, c->u->u_firstlineno))
3980 goto error;
3981 dfs(c, entryblock, &a);
3982
3983 /* Can't modify the bytecode after computing jump offsets. */
3984 assemble_jump_offsets(&a, c);
3985
3986 /* Emit code in reverse postorder from dfs. */
3987 for (i = a.a_nblocks - 1; i >= 0; i--) {
3988 b = a.a_postorder[i];
3989 for (j = 0; j < b->b_iused; j++)
3990 if (!assemble_emit(&a, &b->b_instr[j]))
3991 goto error;
3992 }
3993
3994 if (_PyString_Resize(&a.a_lnotab, a.a_lnotab_off) < 0)
3995 goto error;
3996 if (_PyString_Resize(&a.a_bytecode, a.a_offset) < 0)
3997 goto error;
3998
3999 co = makecode(c, &a);
4000 error:
4001 assemble_free(&a);
4002 return co;
4003 }