/* Execute compiled code */

/* XXX speed up searching for keywords by using a dictionary */

/* enable more aggressive intra-module optimizations, where available */
#define PY_LOCAL_AGGRESSIVE

#include "frameobject.h"
#include "structmember.h"

#define READ_TIMESTAMP(var)

typedef unsigned long long uint64;

#if defined(__ppc__) /* <- Don't know if this is the correct symbol; this
                        section should work for GCC on any PowerPC
                        platform, irrespective of OS.
                        POWER?  Who knows :-) */

#define READ_TIMESTAMP(var) ppc_getcounter(&var)

static void
ppc_getcounter(uint64 *v)
{
    register unsigned long tbu, tb, tbu2;

  loop:
    asm volatile ("mftbu %0" : "=r" (tbu) );
    asm volatile ("mftb  %0" : "=r" (tb)  );
    asm volatile ("mftbu %0" : "=r" (tbu2));
    if (__builtin_expect(tbu != tbu2, 0)) goto loop;

    /* The slightly peculiar way of writing the next lines is
       compiled better by GCC than any other way I tried. */
    ((long*)(v))[0] = tbu;
    ((long*)(v))[1] = tb;
}

#elif defined(__i386__)

/* this is for linux/x86 (and probably any other GCC/x86 combo) */

#define READ_TIMESTAMP(val) \
     __asm__ __volatile__("rdtsc" : "=A" (val))

#elif defined(__x86_64__)

/* for gcc/x86_64, the "A" constraint in DI mode means *either* rax *or* rdx;
   not edx:eax as it does for i386.  Since rdtsc puts its result in edx:eax
   even in 64-bit mode, we need to use "a" and "d" for the lower and upper
   32-bit pieces of the result. */

#define READ_TIMESTAMP(val) \
    __asm__ __volatile__("rdtsc" : \
        "=a" (((int*)&(val))[0]), "=d" (((int*)&(val))[1]));

#else

#error "Don't know how to implement timestamp counter for this architecture"

#endif
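
/* Illustrative sketch (not part of the original file): how a WITH_TSC
   build can bracket a region of interest with the READ_TIMESTAMP() macro
   defined above.  The function and variable names here are hypothetical;
   only READ_TIMESTAMP() and the uint64 typedef come from the code above. */
#ifdef WITH_TSC
static void
example_time_region(void)
{
    uint64 t0, t1;

    READ_TIMESTAMP(t0);
    /* ... region being measured ... */
    READ_TIMESTAMP(t1);
    fprintf(stderr, "region took %llu timestamp ticks\n",
            (unsigned long long)(t1 - t0));
}
#endif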
void dump_tsc(int opcode, int ticked, uint64 inst0, uint64 inst1,
              uint64 loop0, uint64 loop1, uint64 intr0, uint64 intr1)
{
    uint64 intr, inst, loop;
    PyThreadState *tstate = PyThreadState_Get();
    if (!tstate->interp->tscdump)
        return;
    intr = intr1 - intr0;
    inst = inst1 - inst0 - intr;
    loop = loop1 - loop0 - intr;
    fprintf(stderr, "opcode=%03d t=%d inst=%06lld loop=%06lld\n",
            opcode, ticked, inst, loop);
}

/* Turn this on if your compiler chokes on the big switch: */
/* #define CASE_TOO_BIG 1 */

/* For debugging the interpreter: */
#define LLTRACE  1      /* Low-level trace feature */
#define CHECKEXC 1      /* Double-check exception checking */

typedef PyObject *(*callproc)(PyObject *, PyObject *, PyObject *);
/* Forward declarations */
#ifdef WITH_TSC
static PyObject * call_function(PyObject ***, int, uint64 *, uint64 *);
#else
static PyObject * call_function(PyObject ***, int);
#endif
static PyObject * fast_function(PyObject *, PyObject ***, int, int, int);
static PyObject * do_call(PyObject *, PyObject ***, int, int);
static PyObject * ext_do_call(PyObject *, PyObject ***, int, int, int);
static PyObject * update_keyword_args(PyObject *, int, PyObject ***,
                                      PyObject *);
static PyObject * update_star_args(int, int, PyObject *, PyObject ***);
static PyObject * load_args(PyObject ***, int);
#define CALL_FLAG_VAR 1
#define CALL_FLAG_KW 2

static int prtrace(PyObject *, char *);
static int call_trace(Py_tracefunc, PyObject *, PyFrameObject *,
                      int, PyObject *);
static int call_trace_protected(Py_tracefunc, PyObject *,
                                PyFrameObject *, int, PyObject *);
static void call_exc_trace(Py_tracefunc, PyObject *, PyFrameObject *);
static int maybe_call_line_trace(Py_tracefunc, PyObject *,
                                 PyFrameObject *, int *, int *, int *);

static PyObject * apply_slice(PyObject *, PyObject *, PyObject *);
static int assign_slice(PyObject *, PyObject *,
                        PyObject *, PyObject *);
static PyObject * cmp_outcome(int, PyObject *, PyObject *);
static PyObject * import_from(PyObject *, PyObject *);
static int import_all_from(PyObject *, PyObject *);
static PyObject * build_class(PyObject *, PyObject *, PyObject *);
static int exec_statement(PyFrameObject *,
                          PyObject *, PyObject *, PyObject *);
static void set_exc_info(PyThreadState *, PyObject *, PyObject *, PyObject *);
static void reset_exc_info(PyThreadState *);
static void format_exc_check_arg(PyObject *, char *, PyObject *);
static PyObject * string_concatenate(PyObject *, PyObject *,
                                     PyFrameObject *, unsigned char *);
static PyObject * kwd_as_string(PyObject *);
static PyObject * special_lookup(PyObject *, char *, PyObject **);

#define NAME_ERROR_MSG \
    "name '%.200s' is not defined"
#define GLOBAL_NAME_ERROR_MSG \
    "global name '%.200s' is not defined"
#define UNBOUNDLOCAL_ERROR_MSG \
    "local variable '%.200s' referenced before assignment"
#define UNBOUNDFREE_ERROR_MSG \
    "free variable '%.200s' referenced before assignment" \
    " in enclosing scope"
/* Dynamic execution profile */
#ifdef DYNAMIC_EXECUTION_PROFILE
#ifdef DXPAIRS
static long dxpairs[257][256];
#define dxp dxpairs[256]
#else
static long dxp[256];
#endif
#endif

/* Function call profile */
static int pcall[PCALL_NUM];

#define PCALL_FUNCTION 1
#define PCALL_FAST_FUNCTION 2
#define PCALL_FASTER_FUNCTION 3
#define PCALL_METHOD 4
#define PCALL_BOUND_METHOD 5
#define PCALL_CFUNCTION 6
#define PCALL_GENERATOR 8
#define PCALL_OTHER 9

/* Notes about the statistics

   FAST_FUNCTION means no argument tuple needs to be created.
   FASTER_FUNCTION means that the fast-path frame setup code is used.

   If there is a method call where the call can be optimized by changing
   the argument tuple and calling the function directly, it gets recorded
   twice.

   As a result, the relationship among the statistics appears to be
   PCALL_ALL == PCALL_FUNCTION + PCALL_METHOD - PCALL_BOUND_METHOD +
                PCALL_CFUNCTION + PCALL_TYPE + PCALL_GENERATOR + PCALL_OTHER
   PCALL_FUNCTION > PCALL_FAST_FUNCTION > PCALL_FASTER_FUNCTION
   PCALL_METHOD > PCALL_BOUND_METHOD
*/

#define PCALL(POS) pcall[POS]++

PyObject *
PyEval_GetCallStats(PyObject *self)
{
    return Py_BuildValue("iiiiiiiiiii",
                         pcall[0], pcall[1], pcall[2], pcall[3],
                         pcall[4], pcall[5], pcall[6], pcall[7],
                         pcall[8], pcall[9], pcall[10]);
}

/* Without CALL_PROFILE, PyEval_GetCallStats is only a stub: */
PyObject *
PyEval_GetCallStats(PyObject *self)
{
    Py_INCREF(Py_None);
    return Py_None;
}
#include "pythread.h"

static PyThread_type_lock interpreter_lock = 0; /* This is the GIL */
static PyThread_type_lock pending_lock = 0; /* for pending calls */
static long main_thread = 0;

int
PyEval_ThreadsInitialized(void)
{
    return interpreter_lock != 0;
}

void
PyEval_InitThreads(void)
{
    if (interpreter_lock)
        return;
    interpreter_lock = PyThread_allocate_lock();
    PyThread_acquire_lock(interpreter_lock, 1);
    main_thread = PyThread_get_thread_ident();
}

void
PyEval_AcquireLock(void)
{
    PyThread_acquire_lock(interpreter_lock, 1);
}

void
PyEval_ReleaseLock(void)
{
    PyThread_release_lock(interpreter_lock);
}

void
PyEval_AcquireThread(PyThreadState *tstate)
{
    if (tstate == NULL)
        Py_FatalError("PyEval_AcquireThread: NULL new thread state");
    /* Check someone has called PyEval_InitThreads() to create the lock */
    assert(interpreter_lock);
    PyThread_acquire_lock(interpreter_lock, 1);
    if (PyThreadState_Swap(tstate) != NULL)
        Py_FatalError(
            "PyEval_AcquireThread: non-NULL old thread state");
}

void
PyEval_ReleaseThread(PyThreadState *tstate)
{
    if (tstate == NULL)
        Py_FatalError("PyEval_ReleaseThread: NULL thread state");
    if (PyThreadState_Swap(NULL) != tstate)
        Py_FatalError("PyEval_ReleaseThread: wrong thread state");
    PyThread_release_lock(interpreter_lock);
}

/* This function is called from PyOS_AfterFork to ensure that newly
   created child processes don't hold locks referring to threads which
   are not running in the child process.  (This could also be done using
   pthread_atfork mechanism, at least for the pthreads implementation.) */

void
PyEval_ReInitThreads(void)
{
    PyObject *threading, *result;
    PyThreadState *tstate;

    if (!interpreter_lock)
        return;
    /*XXX Can't use PyThread_free_lock here because it does too
      much error-checking.  Doing this cleanly would require
      adding a new function to each thread_*.h.  Instead, just
      create a new lock and waste a little bit of memory */
    interpreter_lock = PyThread_allocate_lock();
    pending_lock = PyThread_allocate_lock();
    PyThread_acquire_lock(interpreter_lock, 1);
    main_thread = PyThread_get_thread_ident();

    /* Update the threading module with the new state. */
    tstate = PyThreadState_GET();
    threading = PyMapping_GetItemString(tstate->interp->modules,
                                        "threading");
    if (threading == NULL) {
        /* threading not imported */
        PyErr_Clear();
        return;
    }
    result = PyObject_CallMethod(threading, "_after_fork", NULL);
    if (result == NULL)
        PyErr_WriteUnraisable(threading);
    else
        Py_DECREF(result);
    Py_DECREF(threading);
}

/* Functions save_thread and restore_thread are always defined so
   dynamically loaded modules needn't be compiled separately for use
   with and without threads: */

PyThreadState *
PyEval_SaveThread(void)
{
    PyThreadState *tstate = PyThreadState_Swap(NULL);
    if (tstate == NULL)
        Py_FatalError("PyEval_SaveThread: NULL tstate");
    if (interpreter_lock)
        PyThread_release_lock(interpreter_lock);
    return tstate;
}

void
PyEval_RestoreThread(PyThreadState *tstate)
{
    if (tstate == NULL)
        Py_FatalError("PyEval_RestoreThread: NULL tstate");
    if (interpreter_lock) {
        PyThread_acquire_lock(interpreter_lock, 1);
    }
    PyThreadState_Swap(tstate);
}
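
/* Illustrative sketch (not part of the original file): the usual way
   extension code releases the GIL around blocking C-level work.
   Py_BEGIN_ALLOW_THREADS expands to a call to PyEval_SaveThread() and
   Py_END_ALLOW_THREADS to PyEval_RestoreThread(), i.e. the two functions
   defined above; the function name below is hypothetical. */
static PyObject *
example_blocking_call(PyObject *self, PyObject *args)
{
    Py_BEGIN_ALLOW_THREADS
    /* ... blocking C-level work (file I/O, a long computation, ...);
       other Python threads can run while the lock is released ... */
    Py_END_ALLOW_THREADS
    Py_RETURN_NONE;
}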
/* Mechanism whereby asynchronously executing callbacks (e.g. UNIX
   signal handlers or Mac I/O completion routines) can schedule calls
   to a function to be called synchronously.
   The synchronous function is called with one void* argument.
   It should return 0 for success or -1 for failure -- failure should
   be accompanied by an exception.

   If registry succeeds, the registry function returns 0; if it fails
   (e.g. due to too many pending calls) it returns -1 (without setting
   an exception condition).

   Note that because registry may occur from within signal handlers,
   or other asynchronous events, calling malloc() is unsafe!

   Any thread can schedule pending calls, but only the main thread
   will execute them.
   There is no facility to schedule calls to a particular thread, but
   that should be easy to change, should that ever be required.  In
   that case, the static variables here should go into the python
   threadstate.
*/

#ifdef WITH_THREAD

/* The WITH_THREAD implementation is thread-safe.  It allows
   scheduling to be made from any thread, and even from an executing
   callback.
 */

#define NPENDINGCALLS 32
static struct {
    int (*func)(void *);
    void *arg;
} pendingcalls[NPENDINGCALLS];
static int pendingfirst = 0;
static int pendinglast = 0;
static volatile int pendingcalls_to_do = 1; /* trigger initialization of lock */
static char pendingbusy = 0;

int
Py_AddPendingCall(int (*func)(void *), void *arg)
{
    int i, j, result = 0;
    PyThread_type_lock lock = pending_lock;

    /* try a few times for the lock.  Since this mechanism is used
     * for signal handling (on the main thread), there is a (slim)
     * chance that a signal is delivered on the same thread while we
     * hold the lock during the Py_MakePendingCalls() function.
     * This avoids a deadlock in that case.
     * Note that signals can be delivered on any thread.  In particular,
     * on Windows, a SIGINT is delivered on a system-created worker
     * thread.
     * We also check for lock being NULL, in the unlikely case that
     * this function is called before any bytecode evaluation takes place.
     */
    for (i = 0; i < 100; i++) {
        if (PyThread_acquire_lock(lock, NOWAIT_LOCK))
            break;
    }

    i = pendinglast;
    j = (i + 1) % NPENDINGCALLS;
    if (j == pendingfirst) {
        result = -1; /* Queue full */
    }
    else {
        pendingcalls[i].func = func;
        pendingcalls[i].arg = arg;
        pendinglast = j;
    }
    /* signal main loop */
    pendingcalls_to_do = 1;
    PyThread_release_lock(lock);
    return result;
}

int
Py_MakePendingCalls(void)
{
    int i, j;
    int (*func)(void *);
    void *arg;

    if (!pending_lock) {
        /* initial allocation of the lock */
        pending_lock = PyThread_allocate_lock();
        if (pending_lock == NULL)
            return -1;
    }

    /* only service pending calls on main thread */
    if (main_thread && PyThread_get_thread_ident() != main_thread)
        return 0;
    /* don't perform recursive pending calls */
    /* perform a bounded number of calls, in case of recursion */
    for (i = 0; i < NPENDINGCALLS; i++) {
        /* pop one item off the queue while holding the lock */
        PyThread_acquire_lock(pending_lock, WAIT_LOCK);
        j = pendingfirst;
        if (j == pendinglast) {
            func = NULL; /* Queue empty */
        }
        else {
            func = pendingcalls[j].func;
            arg = pendingcalls[j].arg;
            pendingfirst = (j + 1) % NPENDINGCALLS;
        }
        pendingcalls_to_do = pendingfirst != pendinglast;
        PyThread_release_lock(pending_lock);
        /* having released the lock, perform the callback */
        if (func == NULL)
            break;
        if (func(arg) < 0)
            return -1;
    }
    return 0;
}

#else /* if ! defined WITH_THREAD */

/*
   WARNING!  ASYNCHRONOUSLY EXECUTING CODE!
   This code is used for signal handling in python that isn't built
   with WITH_THREAD.
   Don't use this implementation when Py_AddPendingCalls() can happen
   on a different thread!

   There are two possible race conditions:
   (1) nested asynchronous calls to Py_AddPendingCall()
   (2) AddPendingCall() calls made while pending calls are being processed.

   (1) is very unlikely because typically signal delivery
   is blocked during signal handling.  So it should be impossible.
   (2) is a real possibility.
   The current code is safe against (2), but not against (1).
   The safety against (2) is derived from the fact that only one
   thread is present, interrupted by signals, and that the critical
   section is protected with the "busy" variable.  On Windows, which
   delivers SIGINT on a system thread, this does not hold and therefore
   Windows really shouldn't use this version.
   The two threads could theoretically wiggle around the "busy" variable.
*/

#define NPENDINGCALLS 32
static struct {
    int (*func)(void *);
    void *arg;
} pendingcalls[NPENDINGCALLS];
static volatile int pendingfirst = 0;
static volatile int pendinglast = 0;
static volatile int pendingcalls_to_do = 0;

int
Py_AddPendingCall(int (*func)(void *), void *arg)
{
    static volatile int busy = 0;
    int i, j;
    /* XXX Begin critical section */
    i = pendinglast;
    j = (i + 1) % NPENDINGCALLS;
    if (j == pendingfirst) {
        return -1; /* Queue full */
    }
    pendingcalls[i].func = func;
    pendingcalls[i].arg = arg;
    pendinglast = j;

    pendingcalls_to_do = 1; /* Signal main loop */
    /* XXX End critical section */
    return 0;
}

int
Py_MakePendingCalls(void)
{
    int i;
    int (*func)(void *);
    void *arg;

    pendingcalls_to_do = 0;
    for (;;) {
        i = pendingfirst;
        if (i == pendinglast)
            break; /* Queue empty */
        func = pendingcalls[i].func;
        arg = pendingcalls[i].arg;
        pendingfirst = (i + 1) % NPENDINGCALLS;
        if (func(arg) < 0) {
            pendingcalls_to_do = 1; /* We're not done yet */
            return -1;
        }
    }
    return 0;
}

#endif /* WITH_THREAD */
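
/* Illustrative sketch (not part of the original file): scheduling work
   from an asynchronous context.  A signal handler must not touch the
   interpreter directly, so it only registers a callback with
   Py_AddPendingCall(); the main thread later runs the callback from
   Py_MakePendingCalls() inside the eval loop.  Both helper names below
   are hypothetical. */
static int
example_pending_callback(void *arg)
{
    /* Runs later, synchronously, in the main thread with the GIL held.
       Return 0 for success, -1 (with an exception set) for failure. */
    PySys_WriteStdout("pending call executed\n");
    return 0;
}

static void
example_signal_handler(int signum)
{
    /* Async context: no malloc(), no general Python API -- just queue
       the callback and return. */
    Py_AddPendingCall(example_pending_callback, NULL);
}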
/* The interpreter's recursion limit */

#ifndef Py_DEFAULT_RECURSION_LIMIT
#define Py_DEFAULT_RECURSION_LIMIT 1000
#endif
static int recursion_limit = Py_DEFAULT_RECURSION_LIMIT;
int _Py_CheckRecursionLimit = Py_DEFAULT_RECURSION_LIMIT;

int
Py_GetRecursionLimit(void)
{
    return recursion_limit;
}

void
Py_SetRecursionLimit(int new_limit)
{
    recursion_limit = new_limit;
    _Py_CheckRecursionLimit = recursion_limit;
}

/* the macro Py_EnterRecursiveCall() only calls _Py_CheckRecursiveCall()
   if the recursion_depth reaches _Py_CheckRecursionLimit.
   If USE_STACKCHECK, the macro decrements _Py_CheckRecursionLimit
   to guarantee that _Py_CheckRecursiveCall() is regularly called.
   Without USE_STACKCHECK, there is no need for this. */
int
_Py_CheckRecursiveCall(char *where)
{
    PyThreadState *tstate = PyThreadState_GET();

#ifdef USE_STACKCHECK
    if (PyOS_CheckStack()) {
        --tstate->recursion_depth;
        PyErr_SetString(PyExc_MemoryError, "Stack overflow");
        return -1;
    }
#endif
    if (tstate->recursion_depth > recursion_limit) {
        --tstate->recursion_depth;
        PyErr_Format(PyExc_RuntimeError,
                     "maximum recursion depth exceeded%s",
                     where);
        return -1;
    }
    _Py_CheckRecursionLimit = recursion_limit;
    return 0;
}
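
/* Illustrative sketch (not part of the original file): how a recursive C
   helper cooperates with the recursion-limit machinery above.  The
   Py_EnterRecursiveCall()/Py_LeaveRecursiveCall() pair is the real API;
   the helper itself and its trivial body are hypothetical. */
static PyObject *
example_recursive_helper(PyObject *obj, int depth)
{
    PyObject *result;

    if (Py_EnterRecursiveCall(" in example_recursive_helper"))
        return NULL;        /* RuntimeError already set by the code above */
    /* ... recurse here, e.g. example_recursive_helper(child, depth + 1) ... */
    result = PyInt_FromLong(depth);
    Py_LeaveRecursiveCall();
    return result;
}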
/* Status code for main loop (reason for stack unwind) */
enum why_code {
    WHY_NOT =       0x0001, /* No error */
    WHY_EXCEPTION = 0x0002, /* Exception occurred */
    WHY_RERAISE =   0x0004, /* Exception re-raised by 'finally' */
    WHY_RETURN =    0x0008, /* 'return' statement */
    WHY_BREAK =     0x0010, /* 'break' statement */
    WHY_CONTINUE =  0x0020, /* 'continue' statement */
    WHY_YIELD =     0x0040  /* 'yield' operator */
};

static enum why_code do_raise(PyObject *, PyObject *, PyObject *);
static int unpack_iterable(PyObject *, int, PyObject **);

/* Records whether tracing is on for any thread.  Counts the number of
   threads for which tstate->c_tracefunc is non-NULL, so if the value
   is 0, we know we don't have to check this thread's c_tracefunc.
   This speeds up the if statement in PyEval_EvalFrameEx() after
   fast_next_opcode. */
static int _Py_TracingPossible = 0;

/* for manipulating the thread switch and periodic "stuff" - used to be
   per thread, now just a pair o' globals */
int _Py_CheckInterval = 100;
volatile int _Py_Ticker = 0; /* so that we hit a "tick" first thing */
PyObject *
PyEval_EvalCode(PyCodeObject *co, PyObject *globals, PyObject *locals)
{
    return PyEval_EvalCodeEx(co,
                             globals, locals,
                             (PyObject **)NULL, 0,
                             (PyObject **)NULL, 0,
                             (PyObject **)NULL, 0,
                             NULL);
}
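
/* Illustrative sketch (not part of the original file): driving
   PyEval_EvalCode() from C by compiling a source string first.  The
   snippet text, dict handling and error handling are simplified; the
   helper name is hypothetical. */
static int
example_run_snippet(void)
{
    PyObject *code, *globals, *result;
    int ok;

    code = Py_CompileString("x = 6 * 7", "<example>", Py_file_input);
    if (code == NULL)
        return -1;
    globals = PyDict_New();
    if (globals == NULL) {
        Py_DECREF(code);
        return -1;
    }
    PyDict_SetItemString(globals, "__builtins__", PyEval_GetBuiltins());

    /* Runs the main interpreter loop defined below on the new code object. */
    result = PyEval_EvalCode((PyCodeObject *)code, globals, globals);
    ok = (result != NULL);
    Py_XDECREF(result);
    Py_DECREF(globals);
    Py_DECREF(code);
    return ok ? 0 : -1;
}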
/* Interpreter main loop */

PyObject *
PyEval_EvalFrame(PyFrameObject *f) {
    /* This is for backward compatibility with extension modules that
       used this API; core interpreter code should call
       PyEval_EvalFrameEx() */
    return PyEval_EvalFrameEx(f, 0);
}

PyObject *
PyEval_EvalFrameEx(PyFrameObject *f, int throwflag)
{
    register PyObject **stack_pointer;  /* Next free slot in value stack */
    register unsigned char *next_instr;
    register int opcode;        /* Current opcode */
    register int oparg;         /* Current opcode argument, if any */
    register enum why_code why; /* Reason for block stack unwind */
    register int err;           /* Error status -- nonzero if error */
    register PyObject *x;       /* Result object -- NULL if error */
    register PyObject *v;       /* Temporary objects popped off stack */
    register PyObject *w;
    register PyObject *u;
    register PyObject *t;
    register PyObject *stream = NULL;   /* for PRINT opcodes */
    register PyObject **fastlocals, **freevars;
    PyObject *retval = NULL;            /* Return value */
    PyThreadState *tstate = PyThreadState_GET();

    /* when tracing we set things up so that

           not (instr_lb <= current_bytecode_offset < instr_ub)

       is true when the line being executed has changed.  The
       initial values are such as to make this false the first
       time it is tested. */
    int instr_ub = -1, instr_lb = 0, instr_prev = -1;

    unsigned char *first_instr;

#if defined(Py_DEBUG) || defined(LLTRACE)
    /* Make it easier to find out where we are with a debugger */
    char *filename;
#endif

/* Tuple access macros */

#ifndef Py_DEBUG
#define GETITEM(v, i) PyTuple_GET_ITEM((PyTupleObject *)(v), (i))
#else
#define GETITEM(v, i) PyTuple_GetItem((v), (i))
#endif

    /* Use Pentium timestamp counter to mark certain events:
       inst0 -- beginning of switch statement for opcode dispatch
       inst1 -- end of switch statement (may be skipped)
       loop0 -- the top of the mainloop
       loop1 -- place where control returns again to top of mainloop
       intr0 -- beginning of long interruption
       intr1 -- end of long interruption

       Many opcodes call out to helper C functions.  In some cases, the
       time in those functions should be counted towards the time for the
       opcode, but not in all cases.  For example, a CALL_FUNCTION opcode
       calls another Python function; there's no point in charging all the
       bytecode executed by the called function to the caller.

       It's hard to make a useful judgement statically.  In the presence
       of operator overloading, it's impossible to tell if a call will
       execute new Python code or not.

       It's a case-by-case judgement.  I'll use intr1 for the following:
       CALL_FUNCTION (and friends)
     */
    uint64 inst0, inst1, loop0, loop1, intr0 = 0, intr1 = 0;

    READ_TIMESTAMP(inst0);
    READ_TIMESTAMP(inst1);
    READ_TIMESTAMP(loop0);
    READ_TIMESTAMP(loop1);

    /* shut up the compiler */

/* Code access macros */

#define INSTR_OFFSET()  ((int)(next_instr - first_instr))
#define NEXTOP()        (*next_instr++)
#define NEXTARG()       (next_instr += 2, (next_instr[-1]<<8) + next_instr[-2])
#define PEEKARG()       ((next_instr[2]<<8) + next_instr[1])
#define JUMPTO(x)       (next_instr = first_instr + (x))
#define JUMPBY(x)       (next_instr += (x))

/* OpCode prediction macros
    Some opcodes tend to come in pairs thus making it possible to
    predict the second code when the first is run.  For example,
    GET_ITER is often followed by FOR_ITER.  And FOR_ITER is often
    followed by STORE_FAST or UNPACK_SEQUENCE.

    Verifying the prediction costs a single high-speed test of a register
    variable against a constant.  If the pairing was good, then the
    processor's own internal branch prediction has a high likelihood of
    success, resulting in a nearly zero-overhead transition to the
    next opcode.  A successful prediction saves a trip through the eval-loop
    including its two unpredictable branches, the HAS_ARG test and the
    switch-case.  Combined with the processor's internal branch prediction,
    a successful PREDICT has the effect of making the two opcodes run as if
    they were a single new opcode with the bodies combined.

    If collecting opcode statistics, your choices are to either keep the
    predictions turned-on and interpret the results as if some opcodes
    had been combined or turn-off predictions so that the opcode frequency
    counter updates for both opcodes.
*/

#ifdef DYNAMIC_EXECUTION_PROFILE
#define PREDICT(op)             if (0) goto PRED_##op
#else
#define PREDICT(op)             if (*next_instr == op) goto PRED_##op
#endif

#define PREDICTED(op)           PRED_##op: next_instr++
#define PREDICTED_WITH_ARG(op)  PRED_##op: oparg = PEEKARG(); next_instr += 3
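
/* Illustrative sketch (not part of the original file): how the prediction
   macros above pair up inside the opcode switch.  GET_ITER/FOR_ITER is the
   pairing named in the comment above; the opcode bodies are elided here and
   only the macro placement is the point. */
#if 0   /* illustration only, not compiled */
        case GET_ITER:
            /* ... replace TOS with iter(TOS) ... */
            PREDICT(FOR_ITER);          /* cheap test: jump to PRED_FOR_ITER if it is next */
            continue;

        PREDICTED_WITH_ARG(FOR_ITER);   /* defines the PRED_FOR_ITER label, fetches oparg */
        case FOR_ITER:
            /* ... advance the iterator; on success PREDICT(STORE_FAST), etc. ... */
            continue;
#endif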
/* Stack manipulation macros */

/* The stack can grow at most MAXINT deep, as co_nlocals and
   co_stacksize are ints. */
#define STACK_LEVEL()     ((int)(stack_pointer - f->f_valuestack))
#define EMPTY()           (STACK_LEVEL() == 0)
#define TOP()             (stack_pointer[-1])
#define SECOND()          (stack_pointer[-2])
#define THIRD()           (stack_pointer[-3])
#define FOURTH()          (stack_pointer[-4])
#define PEEK(n)           (stack_pointer[-(n)])
#define SET_TOP(v)        (stack_pointer[-1] = (v))
#define SET_SECOND(v)     (stack_pointer[-2] = (v))
#define SET_THIRD(v)      (stack_pointer[-3] = (v))
#define SET_FOURTH(v)     (stack_pointer[-4] = (v))
#define SET_VALUE(n, v)   (stack_pointer[-(n)] = (v))
#define BASIC_STACKADJ(n) (stack_pointer += n)
#define BASIC_PUSH(v)     (*stack_pointer++ = (v))
#define BASIC_POP()       (*--stack_pointer)

#ifdef LLTRACE
#define PUSH(v)         { (void)(BASIC_PUSH(v), \
                          lltrace && prtrace(TOP(), "push")); \
                          assert(STACK_LEVEL() <= co->co_stacksize); }
#define POP()           ((void)(lltrace && prtrace(TOP(), "pop")), \
                         BASIC_POP())
#define STACKADJ(n)     { (void)(BASIC_STACKADJ(n), \
                          lltrace && prtrace(TOP(), "stackadj")); \
                          assert(STACK_LEVEL() <= co->co_stacksize); }
#define EXT_POP(STACK_POINTER) ((void)(lltrace && \
                                prtrace((STACK_POINTER)[-1], "ext_pop")), \
                                *--(STACK_POINTER))
#else
#define PUSH(v)                BASIC_PUSH(v)
#define POP()                  BASIC_POP()
#define STACKADJ(n)            BASIC_STACKADJ(n)
#define EXT_POP(STACK_POINTER) (*--(STACK_POINTER))
#endif

/* Local variable macros */

#define GETLOCAL(i)     (fastlocals[i])

/* The SETLOCAL() macro must not DECREF the local variable in-place and
   then store the new value; it must copy the old value to a temporary
   value, then store the new value, and then DECREF the temporary value.
   This is because it is possible that during the DECREF the frame is
   accessed by other code (e.g. a __del__ method or gc.collect()) and the
   variable would be pointing to already-freed memory. */
#define SETLOCAL(i, value)      do { PyObject *tmp = GETLOCAL(i); \
                                     GETLOCAL(i) = value; \
                                     Py_XDECREF(tmp); } while (0)
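
/* Illustrative sketch (not part of the original file) of the hazard the
   SETLOCAL() comment above describes.  The "bad" variant may run __del__
   or gc code from inside the DECREF while the frame slot still points at
   the object being destroyed; the macro's copy-then-DECREF ordering closes
   that window. */
#if 0   /* illustration only, not compiled */
    /* BAD: frame slot dangles while __del__ / gc.collect() can see it */
    Py_XDECREF(GETLOCAL(i));
    GETLOCAL(i) = value;

    /* GOOD: what SETLOCAL(i, value) actually expands to */
    {
        PyObject *tmp = GETLOCAL(i);
        GETLOCAL(i) = value;
        Py_XDECREF(tmp);
    }
#endif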
873 if (Py_EnterRecursiveCall(""))
878 if (tstate
->use_tracing
) {
879 if (tstate
->c_tracefunc
!= NULL
) {
880 /* tstate->c_tracefunc, if defined, is a
881 function that will be called on *every* entry
882 to a code block. Its return value, if not
883 None, is a function that will be called at
884 the start of each executed line of code.
885 (Actually, the function must return itself
886 in order to continue tracing.) The trace
887 functions are called with three arguments:
888 a pointer to the current frame, a string
889 indicating why the function is called, and
890 an argument which depends on the situation.
891 The global trace function is also called
892 whenever an exception is detected. */
893 if (call_trace_protected(tstate
->c_tracefunc
,
895 f
, PyTrace_CALL
, Py_None
)) {
896 /* Trace function raised an error */
897 goto exit_eval_frame
;
900 if (tstate
->c_profilefunc
!= NULL
) {
901 /* Similar for c_profilefunc, except it needn't
902 return itself and isn't called for "line" events */
903 if (call_trace_protected(tstate
->c_profilefunc
,
904 tstate
->c_profileobj
,
905 f
, PyTrace_CALL
, Py_None
)) {
906 /* Profile function raised an error */
907 goto exit_eval_frame
;
913 names
= co
->co_names
;
914 consts
= co
->co_consts
;
915 fastlocals
= f
->f_localsplus
;
916 freevars
= f
->f_localsplus
+ co
->co_nlocals
;
917 first_instr
= (unsigned char*) PyString_AS_STRING(co
->co_code
);
918 /* An explanation is in order for the next line.
920 f->f_lasti now refers to the index of the last instruction
921 executed. You might think this was obvious from the name, but
922 this wasn't always true before 2.3! PyFrame_New now sets
923 f->f_lasti to -1 (i.e. the index *before* the first instruction)
924 and YIELD_VALUE doesn't fiddle with f_lasti any more. So this
927 When the PREDICT() macros are enabled, some opcode pairs follow in
928 direct succession without updating f->f_lasti. A successful
929 prediction effectively links the two codes together as if they
930 were a single new opcode; accordingly,f->f_lasti will point to
931 the first code in the pair (for instance, GET_ITER followed by
932 FOR_ITER is effectively a single opcode and f->f_lasti will point
933 at to the beginning of the combined pair.)
935 next_instr
= first_instr
+ f
->f_lasti
+ 1;
936 stack_pointer
= f
->f_stacktop
;
937 assert(stack_pointer
!= NULL
);
938 f
->f_stacktop
= NULL
; /* remains NULL unless yield suspends frame */
941 lltrace
= PyDict_GetItemString(f
->f_globals
, "__lltrace__") != NULL
;
943 #if defined(Py_DEBUG) || defined(LLTRACE)
944 filename
= PyString_AsString(co
->co_filename
);
949 x
= Py_None
; /* Not a reference, just anything non-NULL */
952 if (throwflag
) { /* support for generator.throw() */
960 /* Almost surely, the opcode executed a break
961 or a continue, preventing inst1 from being set
962 on the way out of the loop.
964 READ_TIMESTAMP(inst1
);
967 dump_tsc(opcode
, ticked
, inst0
, inst1
, loop0
, loop1
,
973 READ_TIMESTAMP(loop0
);
975 assert(stack_pointer
>= f
->f_valuestack
); /* else underflow */
976 assert(STACK_LEVEL() <= co
->co_stacksize
); /* else overflow */
978 /* Do periodic things. Doing this every time through
979 the loop would add too much overhead, so we do it
980 only every Nth instruction. We also do it if
981 ``pendingcalls_to_do'' is set, i.e. when an asynchronous
982 event needs attention (e.g. a signal handler or
983 async I/O handler); see Py_AddPendingCall() and
984 Py_MakePendingCalls() above. */
986 if (--_Py_Ticker
< 0) {
987 if (*next_instr
== SETUP_FINALLY
) {
988 /* Make the last opcode before
989 a try: finally: block uninterruptable. */
990 goto fast_next_opcode
;
992 _Py_Ticker
= _Py_CheckInterval
;
993 tstate
->tick_counter
++;
997 if (pendingcalls_to_do
) {
998 if (Py_MakePendingCalls() < 0) {
1002 if (pendingcalls_to_do
)
1003 /* MakePendingCalls() didn't succeed.
1004 Force early re-execution of this
1005 "periodic" code, possibly after
1010 if (interpreter_lock
) {
1011 /* Give another thread a chance */
1013 if (PyThreadState_Swap(NULL
) != tstate
)
1014 Py_FatalError("ceval: tstate mix-up");
1015 PyThread_release_lock(interpreter_lock
);
1017 /* Other threads may run now */
1019 PyThread_acquire_lock(interpreter_lock
, 1);
1020 if (PyThreadState_Swap(tstate
) != NULL
)
1021 Py_FatalError("ceval: orphan tstate");
1023 /* Check for thread interrupts */
1025 if (tstate
->async_exc
!= NULL
) {
1026 x
= tstate
->async_exc
;
1027 tstate
->async_exc
= NULL
;
1030 why
= WHY_EXCEPTION
;
1038 f
->f_lasti
= INSTR_OFFSET();
1040 /* line-by-line tracing support */
1042 if (_Py_TracingPossible
&&
1043 tstate
->c_tracefunc
!= NULL
&& !tstate
->tracing
) {
1044 /* see maybe_call_line_trace
1045 for expository comments */
1046 f
->f_stacktop
= stack_pointer
;
1048 err
= maybe_call_line_trace(tstate
->c_tracefunc
,
1050 f
, &instr_lb
, &instr_ub
,
1052 /* Reload possibly changed frame fields */
1054 if (f
->f_stacktop
!= NULL
) {
1055 stack_pointer
= f
->f_stacktop
;
1056 f
->f_stacktop
= NULL
;
1059 /* trace function raised an exception */
1064 /* Extract opcode and argument */
1067 oparg
= 0; /* allows oparg to be stored in a register because
1068 it doesn't have to be remembered across a full loop */
1069 if (HAS_ARG(opcode
))
1072 #ifdef DYNAMIC_EXECUTION_PROFILE
1074 dxpairs
[lastopcode
][opcode
]++;
1075 lastopcode
= opcode
;
1081 /* Instruction tracing */
1084 if (HAS_ARG(opcode
)) {
1085 printf("%d: %d, %d\n",
1086 f
->f_lasti
, opcode
, oparg
);
1090 f
->f_lasti
, opcode
);
1095 /* Main switch on opcode */
1096 READ_TIMESTAMP(inst0
);
1101 It is essential that any operation that fails sets either
1102 x to NULL, err to nonzero, or why to anything but WHY_NOT,
1103 and that no operation that succeeds does this! */
1105 /* case STOP_CODE: this is an error! */
1108 goto fast_next_opcode
;
1111 x
= GETLOCAL(oparg
);
1115 goto fast_next_opcode
;
1117 format_exc_check_arg(PyExc_UnboundLocalError
,
1118 UNBOUNDLOCAL_ERROR_MSG
,
1119 PyTuple_GetItem(co
->co_varnames
, oparg
));
1123 x
= GETITEM(consts
, oparg
);
1126 goto fast_next_opcode
;
1128 PREDICTED_WITH_ARG(STORE_FAST
);
1132 goto fast_next_opcode
;
1137 goto fast_next_opcode
;
1144 goto fast_next_opcode
;
1153 goto fast_next_opcode
;
1164 goto fast_next_opcode
;
1170 goto fast_next_opcode
;
1181 goto fast_next_opcode
;
1182 } else if (oparg
== 3) {
1193 goto fast_next_opcode
;
1195 Py_FatalError("invalid argument to DUP_TOPX"
1196 " (bytecode corruption?)");
1197 /* Never returns, so don't bother to set why. */
1200 case UNARY_POSITIVE
:
1202 x
= PyNumber_Positive(v
);
1205 if (x
!= NULL
) continue;
1208 case UNARY_NEGATIVE
:
1210 x
= PyNumber_Negative(v
);
1213 if (x
!= NULL
) continue;
1218 err
= PyObject_IsTrue(v
);
1226 Py_INCREF(Py_False
);
1236 x
= PyObject_Repr(v
);
1239 if (x
!= NULL
) continue;
1244 x
= PyNumber_Invert(v
);
1247 if (x
!= NULL
) continue;
1253 x
= PyNumber_Power(v
, w
, Py_None
);
1257 if (x
!= NULL
) continue;
1260 case BINARY_MULTIPLY
:
1263 x
= PyNumber_Multiply(v
, w
);
1267 if (x
!= NULL
) continue;
1271 if (!_Py_QnewFlag
) {
1274 x
= PyNumber_Divide(v
, w
);
1278 if (x
!= NULL
) continue;
1281 /* -Qnew is in effect: fall through to
1282 BINARY_TRUE_DIVIDE */
1283 case BINARY_TRUE_DIVIDE
:
1286 x
= PyNumber_TrueDivide(v
, w
);
1290 if (x
!= NULL
) continue;
1293 case BINARY_FLOOR_DIVIDE
:
1296 x
= PyNumber_FloorDivide(v
, w
);
1300 if (x
!= NULL
) continue;
1306 if (PyString_CheckExact(v
))
1307 x
= PyString_Format(v
, w
);
1309 x
= PyNumber_Remainder(v
, w
);
1313 if (x
!= NULL
) continue;
1319 if (PyInt_CheckExact(v
) && PyInt_CheckExact(w
)) {
1320 /* INLINE: int + int */
1321 register long a
, b
, i
;
1322 a
= PyInt_AS_LONG(v
);
1323 b
= PyInt_AS_LONG(w
);
1324 /* cast to avoid undefined behaviour
1326 i
= (long)((unsigned long)a
+ b
);
1327 if ((i
^a
) < 0 && (i
^b
) < 0)
1329 x
= PyInt_FromLong(i
);
1331 else if (PyString_CheckExact(v
) &&
1332 PyString_CheckExact(w
)) {
1333 x
= string_concatenate(v
, w
, f
, next_instr
);
1334 /* string_concatenate consumed the ref to v */
1335 goto skip_decref_vx
;
1339 x
= PyNumber_Add(v
, w
);
1345 if (x
!= NULL
) continue;
1348 case BINARY_SUBTRACT
:
1351 if (PyInt_CheckExact(v
) && PyInt_CheckExact(w
)) {
1352 /* INLINE: int - int */
1353 register long a
, b
, i
;
1354 a
= PyInt_AS_LONG(v
);
1355 b
= PyInt_AS_LONG(w
);
1356 /* cast to avoid undefined behaviour
1358 i
= (long)((unsigned long)a
- b
);
1359 if ((i
^a
) < 0 && (i
^~b
) < 0)
1361 x
= PyInt_FromLong(i
);
1365 x
= PyNumber_Subtract(v
, w
);
1370 if (x
!= NULL
) continue;
1376 if (PyList_CheckExact(v
) && PyInt_CheckExact(w
)) {
1377 /* INLINE: list[int] */
1378 Py_ssize_t i
= PyInt_AsSsize_t(w
);
1380 i
+= PyList_GET_SIZE(v
);
1381 if (i
>= 0 && i
< PyList_GET_SIZE(v
)) {
1382 x
= PyList_GET_ITEM(v
, i
);
1390 x
= PyObject_GetItem(v
, w
);
1394 if (x
!= NULL
) continue;
1400 x
= PyNumber_Lshift(v
, w
);
1404 if (x
!= NULL
) continue;
1410 x
= PyNumber_Rshift(v
, w
);
1414 if (x
!= NULL
) continue;
1420 x
= PyNumber_And(v
, w
);
1424 if (x
!= NULL
) continue;
1430 x
= PyNumber_Xor(v
, w
);
1434 if (x
!= NULL
) continue;
1440 x
= PyNumber_Or(v
, w
);
1444 if (x
!= NULL
) continue;
1450 err
= PyList_Append(v
, w
);
1453 PREDICT(JUMP_ABSOLUTE
);
1460 v
= stack_pointer
[-oparg
];
1461 err
= PySet_Add(v
, w
);
1464 PREDICT(JUMP_ABSOLUTE
);
1472 x
= PyNumber_InPlacePower(v
, w
, Py_None
);
1476 if (x
!= NULL
) continue;
1479 case INPLACE_MULTIPLY
:
1482 x
= PyNumber_InPlaceMultiply(v
, w
);
1486 if (x
!= NULL
) continue;
1489 case INPLACE_DIVIDE
:
1490 if (!_Py_QnewFlag
) {
1493 x
= PyNumber_InPlaceDivide(v
, w
);
1497 if (x
!= NULL
) continue;
1500 /* -Qnew is in effect: fall through to
1501 INPLACE_TRUE_DIVIDE */
1502 case INPLACE_TRUE_DIVIDE
:
1505 x
= PyNumber_InPlaceTrueDivide(v
, w
);
1509 if (x
!= NULL
) continue;
1512 case INPLACE_FLOOR_DIVIDE
:
1515 x
= PyNumber_InPlaceFloorDivide(v
, w
);
1519 if (x
!= NULL
) continue;
1522 case INPLACE_MODULO
:
1525 x
= PyNumber_InPlaceRemainder(v
, w
);
1529 if (x
!= NULL
) continue;
1535 if (PyInt_CheckExact(v
) && PyInt_CheckExact(w
)) {
1536 /* INLINE: int + int */
1537 register long a
, b
, i
;
1538 a
= PyInt_AS_LONG(v
);
1539 b
= PyInt_AS_LONG(w
);
1541 if ((i
^a
) < 0 && (i
^b
) < 0)
1543 x
= PyInt_FromLong(i
);
1545 else if (PyString_CheckExact(v
) &&
1546 PyString_CheckExact(w
)) {
1547 x
= string_concatenate(v
, w
, f
, next_instr
);
1548 /* string_concatenate consumed the ref to v */
1553 x
= PyNumber_InPlaceAdd(v
, w
);
1559 if (x
!= NULL
) continue;
1562 case INPLACE_SUBTRACT
:
1565 if (PyInt_CheckExact(v
) && PyInt_CheckExact(w
)) {
1566 /* INLINE: int - int */
1567 register long a
, b
, i
;
1568 a
= PyInt_AS_LONG(v
);
1569 b
= PyInt_AS_LONG(w
);
1571 if ((i
^a
) < 0 && (i
^~b
) < 0)
1573 x
= PyInt_FromLong(i
);
1577 x
= PyNumber_InPlaceSubtract(v
, w
);
1582 if (x
!= NULL
) continue;
1585 case INPLACE_LSHIFT
:
1588 x
= PyNumber_InPlaceLshift(v
, w
);
1592 if (x
!= NULL
) continue;
1595 case INPLACE_RSHIFT
:
1598 x
= PyNumber_InPlaceRshift(v
, w
);
1602 if (x
!= NULL
) continue;
1608 x
= PyNumber_InPlaceAnd(v
, w
);
1612 if (x
!= NULL
) continue;
1618 x
= PyNumber_InPlaceXor(v
, w
);
1622 if (x
!= NULL
) continue;
1628 x
= PyNumber_InPlaceOr(v
, w
);
1632 if (x
!= NULL
) continue;
1639 if ((opcode
-SLICE
) & 2)
1643 if ((opcode
-SLICE
) & 1)
1648 x
= apply_slice(u
, v
, w
);
1653 if (x
!= NULL
) continue;
1660 if ((opcode
-STORE_SLICE
) & 2)
1664 if ((opcode
-STORE_SLICE
) & 1)
1670 err
= assign_slice(u
, v
, w
, t
); /* u[v:w] = t */
1675 if (err
== 0) continue;
1678 case DELETE_SLICE
+0:
1679 case DELETE_SLICE
+1:
1680 case DELETE_SLICE
+2:
1681 case DELETE_SLICE
+3:
1682 if ((opcode
-DELETE_SLICE
) & 2)
1686 if ((opcode
-DELETE_SLICE
) & 1)
1691 err
= assign_slice(u
, v
, w
, (PyObject
*)NULL
);
1696 if (err
== 0) continue;
1705 err
= PyObject_SetItem(v
, w
, u
);
1709 if (err
== 0) continue;
1717 err
= PyObject_DelItem(v
, w
);
1720 if (err
== 0) continue;
1725 w
= PySys_GetObject("displayhook");
1727 PyErr_SetString(PyExc_RuntimeError
,
1728 "lost sys.displayhook");
1733 x
= PyTuple_Pack(1, v
);
1738 w
= PyEval_CallObject(w
, x
);
1749 /* fall through to PRINT_ITEM */
1753 if (stream
== NULL
|| stream
== Py_None
) {
1754 w
= PySys_GetObject("stdout");
1756 PyErr_SetString(PyExc_RuntimeError
,
1761 /* PyFile_SoftSpace() can exececute arbitrary code
1762 if sys.stdout is an instance with a __getattr__.
1763 If __getattr__ raises an exception, w will
1764 be freed, so we need to prevent that temporarily. */
1766 if (w
!= NULL
&& PyFile_SoftSpace(w
, 0))
1767 err
= PyFile_WriteString(" ", w
);
1769 err
= PyFile_WriteObject(v
, w
, Py_PRINT_RAW
);
1771 /* XXX move into writeobject() ? */
1772 if (PyString_Check(v
)) {
1773 char *s
= PyString_AS_STRING(v
);
1774 Py_ssize_t len
= PyString_GET_SIZE(v
);
1776 !isspace(Py_CHARMASK(s
[len
-1])) ||
1778 PyFile_SoftSpace(w
, 1);
1780 #ifdef Py_USING_UNICODE
1781 else if (PyUnicode_Check(v
)) {
1782 Py_UNICODE
*s
= PyUnicode_AS_UNICODE(v
);
1783 Py_ssize_t len
= PyUnicode_GET_SIZE(v
);
1785 !Py_UNICODE_ISSPACE(s
[len
-1]) ||
1787 PyFile_SoftSpace(w
, 1);
1791 PyFile_SoftSpace(w
, 1);
1801 case PRINT_NEWLINE_TO
:
1803 /* fall through to PRINT_NEWLINE */
1806 if (stream
== NULL
|| stream
== Py_None
) {
1807 w
= PySys_GetObject("stdout");
1809 PyErr_SetString(PyExc_RuntimeError
,
1811 why
= WHY_EXCEPTION
;
1815 /* w.write() may replace sys.stdout, so we
1816 * have to keep our reference to it */
1818 err
= PyFile_WriteString("\n", w
);
1820 PyFile_SoftSpace(w
, 0);
1829 default: switch (opcode
) {
1835 u
= POP(); /* traceback */
1838 v
= POP(); /* value */
1841 w
= POP(); /* exc */
1842 case 0: /* Fallthrough */
1843 why
= do_raise(w
, v
, u
);
1846 PyErr_SetString(PyExc_SystemError
,
1847 "bad RAISE_VARARGS oparg");
1848 why
= WHY_EXCEPTION
;
1854 if ((x
= f
->f_locals
) != NULL
) {
1859 PyErr_SetString(PyExc_SystemError
, "no locals");
1865 goto fast_block_end
;
1869 f
->f_stacktop
= stack_pointer
;
1878 READ_TIMESTAMP(intr0
);
1879 err
= exec_statement(f
, u
, v
, w
);
1880 READ_TIMESTAMP(intr1
);
1888 PyTryBlock
*b
= PyFrame_BlockPop(f
);
1889 while (STACK_LEVEL() > b
->b_level
) {
1896 PREDICTED(END_FINALLY
);
1899 if (PyInt_Check(v
)) {
1900 why
= (enum why_code
) PyInt_AS_LONG(v
);
1901 assert(why
!= WHY_YIELD
);
1902 if (why
== WHY_RETURN
||
1903 why
== WHY_CONTINUE
)
1906 else if (PyExceptionClass_Check(v
) ||
1907 PyString_Check(v
)) {
1910 PyErr_Restore(v
, w
, u
);
1914 else if (v
!= Py_None
) {
1915 PyErr_SetString(PyExc_SystemError
,
1916 "'finally' pops bad exception");
1917 why
= WHY_EXCEPTION
;
1927 x
= build_class(u
, v
, w
);
1935 w
= GETITEM(names
, oparg
);
1937 if ((x
= f
->f_locals
) != NULL
) {
1938 if (PyDict_CheckExact(x
))
1939 err
= PyDict_SetItem(x
, w
, v
);
1941 err
= PyObject_SetItem(x
, w
, v
);
1943 if (err
== 0) continue;
1946 PyErr_Format(PyExc_SystemError
,
1947 "no locals found when storing %s",
1952 w
= GETITEM(names
, oparg
);
1953 if ((x
= f
->f_locals
) != NULL
) {
1954 if ((err
= PyObject_DelItem(x
, w
)) != 0)
1955 format_exc_check_arg(PyExc_NameError
,
1960 PyErr_Format(PyExc_SystemError
,
1961 "no locals when deleting %s",
1965 PREDICTED_WITH_ARG(UNPACK_SEQUENCE
);
1966 case UNPACK_SEQUENCE
:
1968 if (PyTuple_CheckExact(v
) &&
1969 PyTuple_GET_SIZE(v
) == oparg
) {
1970 PyObject
**items
= \
1971 ((PyTupleObject
*)v
)->ob_item
;
1979 } else if (PyList_CheckExact(v
) &&
1980 PyList_GET_SIZE(v
) == oparg
) {
1981 PyObject
**items
= \
1982 ((PyListObject
*)v
)->ob_item
;
1988 } else if (unpack_iterable(v
, oparg
,
1989 stack_pointer
+ oparg
)) {
1992 /* unpack_iterable() raised an exception */
1993 why
= WHY_EXCEPTION
;
1999 w
= GETITEM(names
, oparg
);
2003 err
= PyObject_SetAttr(v
, w
, u
); /* v.w = u */
2006 if (err
== 0) continue;
2010 w
= GETITEM(names
, oparg
);
2012 err
= PyObject_SetAttr(v
, w
, (PyObject
*)NULL
);
2018 w
= GETITEM(names
, oparg
);
2020 err
= PyDict_SetItem(f
->f_globals
, w
, v
);
2022 if (err
== 0) continue;
2026 w
= GETITEM(names
, oparg
);
2027 if ((err
= PyDict_DelItem(f
->f_globals
, w
)) != 0)
2028 format_exc_check_arg(
2029 PyExc_NameError
, GLOBAL_NAME_ERROR_MSG
, w
);
2033 w
= GETITEM(names
, oparg
);
2034 if ((v
= f
->f_locals
) == NULL
) {
2035 PyErr_Format(PyExc_SystemError
,
2036 "no locals when loading %s",
2038 why
= WHY_EXCEPTION
;
2041 if (PyDict_CheckExact(v
)) {
2042 x
= PyDict_GetItem(v
, w
);
2046 x
= PyObject_GetItem(v
, w
);
2047 if (x
== NULL
&& PyErr_Occurred()) {
2048 if (!PyErr_ExceptionMatches(
2055 x
= PyDict_GetItem(f
->f_globals
, w
);
2057 x
= PyDict_GetItem(f
->f_builtins
, w
);
2059 format_exc_check_arg(
2071 w
= GETITEM(names
, oparg
);
2072 if (PyString_CheckExact(w
)) {
2073 /* Inline the PyDict_GetItem() calls.
2074 WARNING: this is an extreme speed hack.
2075 Do not try this at home. */
2076 long hash
= ((PyStringObject
*)w
)->ob_shash
;
2080 d
= (PyDictObject
*)(f
->f_globals
);
2081 e
= d
->ma_lookup(d
, w
, hash
);
2092 d
= (PyDictObject
*)(f
->f_builtins
);
2093 e
= d
->ma_lookup(d
, w
, hash
);
2104 goto load_global_error
;
2107 /* This is the un-inlined version of the code above */
2108 x
= PyDict_GetItem(f
->f_globals
, w
);
2110 x
= PyDict_GetItem(f
->f_builtins
, w
);
2113 format_exc_check_arg(
2115 GLOBAL_NAME_ERROR_MSG
, w
);
2124 x
= GETLOCAL(oparg
);
2126 SETLOCAL(oparg
, NULL
);
2129 format_exc_check_arg(
2130 PyExc_UnboundLocalError
,
2131 UNBOUNDLOCAL_ERROR_MSG
,
2132 PyTuple_GetItem(co
->co_varnames
, oparg
)
2137 x
= freevars
[oparg
];
2140 if (x
!= NULL
) continue;
2144 x
= freevars
[oparg
];
2151 /* Don't stomp existing exception */
2152 if (PyErr_Occurred())
2154 if (oparg
< PyTuple_GET_SIZE(co
->co_cellvars
)) {
2155 v
= PyTuple_GET_ITEM(co
->co_cellvars
,
2157 format_exc_check_arg(
2158 PyExc_UnboundLocalError
,
2159 UNBOUNDLOCAL_ERROR_MSG
,
2162 v
= PyTuple_GET_ITEM(co
->co_freevars
, oparg
-
2163 PyTuple_GET_SIZE(co
->co_cellvars
));
2164 format_exc_check_arg(PyExc_NameError
,
2165 UNBOUNDFREE_ERROR_MSG
, v
);
2171 x
= freevars
[oparg
];
2177 x
= PyTuple_New(oparg
);
2179 for (; --oparg
>= 0;) {
2181 PyTuple_SET_ITEM(x
, oparg
, w
);
2189 x
= PyList_New(oparg
);
2191 for (; --oparg
>= 0;) {
2193 PyList_SET_ITEM(x
, oparg
, w
);
2201 x
= PySet_New(NULL
);
2203 for (; --oparg
>= 0;) {
2206 err
= PySet_Add(x
, w
);
2220 x
= _PyDict_NewPresized((Py_ssize_t
)oparg
);
2222 if (x
!= NULL
) continue;
2226 w
= TOP(); /* key */
2227 u
= SECOND(); /* value */
2228 v
= THIRD(); /* dict */
2230 assert (PyDict_CheckExact(v
));
2231 err
= PyDict_SetItem(v
, w
, u
); /* v[w] = u */
2234 if (err
== 0) continue;
2238 w
= TOP(); /* key */
2239 u
= SECOND(); /* value */
2241 v
= stack_pointer
[-oparg
]; /* dict */
2242 assert (PyDict_CheckExact(v
));
2243 err
= PyDict_SetItem(v
, w
, u
); /* v[w] = u */
2247 PREDICT(JUMP_ABSOLUTE
);
2253 w
= GETITEM(names
, oparg
);
2255 x
= PyObject_GetAttr(v
, w
);
2258 if (x
!= NULL
) continue;
2264 if (PyInt_CheckExact(w
) && PyInt_CheckExact(v
)) {
2265 /* INLINE: cmp(int, int) */
2268 a
= PyInt_AS_LONG(v
);
2269 b
= PyInt_AS_LONG(w
);
2271 case PyCmp_LT
: res
= a
< b
; break;
2272 case PyCmp_LE
: res
= a
<= b
; break;
2273 case PyCmp_EQ
: res
= a
== b
; break;
2274 case PyCmp_NE
: res
= a
!= b
; break;
2275 case PyCmp_GT
: res
= a
> b
; break;
2276 case PyCmp_GE
: res
= a
>= b
; break;
2277 case PyCmp_IS
: res
= v
== w
; break;
2278 case PyCmp_IS_NOT
: res
= v
!= w
; break;
2279 default: goto slow_compare
;
2281 x
= res
? Py_True
: Py_False
;
2286 x
= cmp_outcome(oparg
, v
, w
);
2291 if (x
== NULL
) break;
2292 PREDICT(POP_JUMP_IF_FALSE
);
2293 PREDICT(POP_JUMP_IF_TRUE
);
2297 w
= GETITEM(names
, oparg
);
2298 x
= PyDict_GetItemString(f
->f_builtins
, "__import__");
2300 PyErr_SetString(PyExc_ImportError
,
2301 "__import__ not found");
2307 if (PyInt_AsLong(u
) != -1 || PyErr_Occurred())
2311 f
->f_locals
== NULL
?
2312 Py_None
: f
->f_locals
,
2319 f
->f_locals
== NULL
?
2320 Py_None
: f
->f_locals
,
2330 READ_TIMESTAMP(intr0
);
2332 x
= PyEval_CallObject(v
, w
);
2334 READ_TIMESTAMP(intr1
);
2337 if (x
!= NULL
) continue;
2342 PyFrame_FastToLocals(f
);
2343 if ((x
= f
->f_locals
) == NULL
) {
2344 PyErr_SetString(PyExc_SystemError
,
2345 "no locals found during 'import *'");
2348 READ_TIMESTAMP(intr0
);
2349 err
= import_all_from(x
, v
);
2350 READ_TIMESTAMP(intr1
);
2351 PyFrame_LocalsToFast(f
, 0);
2353 if (err
== 0) continue;
2357 w
= GETITEM(names
, oparg
);
2359 READ_TIMESTAMP(intr0
);
2360 x
= import_from(v
, w
);
2361 READ_TIMESTAMP(intr1
);
2363 if (x
!= NULL
) continue;
2368 goto fast_next_opcode
;
2370 PREDICTED_WITH_ARG(POP_JUMP_IF_FALSE
);
2371 case POP_JUMP_IF_FALSE
:
2375 goto fast_next_opcode
;
2377 if (w
== Py_False
) {
2380 goto fast_next_opcode
;
2382 err
= PyObject_IsTrue(w
);
2392 PREDICTED_WITH_ARG(POP_JUMP_IF_TRUE
);
2393 case POP_JUMP_IF_TRUE
:
2395 if (w
== Py_False
) {
2397 goto fast_next_opcode
;
2402 goto fast_next_opcode
;
2404 err
= PyObject_IsTrue(w
);
2416 case JUMP_IF_FALSE_OR_POP
:
2421 goto fast_next_opcode
;
2423 if (w
== Py_False
) {
2425 goto fast_next_opcode
;
2427 err
= PyObject_IsTrue(w
);
2439 case JUMP_IF_TRUE_OR_POP
:
2441 if (w
== Py_False
) {
2444 goto fast_next_opcode
;
2448 goto fast_next_opcode
;
2450 err
= PyObject_IsTrue(w
);
2455 else if (err
== 0) {
2463 PREDICTED_WITH_ARG(JUMP_ABSOLUTE
);
2467 /* Enabling this path speeds-up all while and for-loops by bypassing
2468 the per-loop checks for signals. By default, this should be turned-off
2469 because it prevents detection of a control-break in tight loops like
2470 "while 1: pass". Compile with this option turned-on when you need
2471 the speed-up and do not need break checking inside tight loops (ones
2472 that contain only instructions ending with goto fast_next_opcode).
2474 goto fast_next_opcode
;
2480 /* before: [obj]; after [getiter(obj)] */
2482 x
= PyObject_GetIter(v
);
2492 PREDICTED_WITH_ARG(FOR_ITER
);
2494 /* before: [iter]; after: [iter, iter()] *or* [] */
2496 x
= (*v
->ob_type
->tp_iternext
)(v
);
2499 PREDICT(STORE_FAST
);
2500 PREDICT(UNPACK_SEQUENCE
);
2503 if (PyErr_Occurred()) {
2504 if (!PyErr_ExceptionMatches(
2505 PyExc_StopIteration
))
2509 /* iterator ended normally */
2517 goto fast_block_end
;
2520 retval
= PyInt_FromLong(oparg
);
2526 goto fast_block_end
;
2531 /* NOTE: If you add any new block-setup opcodes that
2532 are not try/except/finally handlers, you may need
2533 to update the PyGen_NeedsFinalizing() function.
2536 PyFrame_BlockSetup(f
, opcode
, INSTR_OFFSET() + oparg
,
2542 static PyObject
*exit
, *enter
;
2544 x
= special_lookup(w
, "__exit__", &exit
);
2548 u
= special_lookup(w
, "__enter__", &enter
);
2554 x
= PyObject_CallFunctionObjArgs(u
, NULL
);
2558 /* Setup a finally block (SETUP_WITH as a block is
2559 equivalent to SETUP_FINALLY except it normalizes
2560 the exception) before pushing the result of
2561 __enter__ on the stack. */
2562 PyFrame_BlockSetup(f
, SETUP_WITH
, INSTR_OFFSET() + oparg
,
2571 /* At the top of the stack are 1-3 values indicating
2572 how/why we entered the finally clause:
2574 - (TOP, SECOND) = (WHY_{RETURN,CONTINUE}), retval
2575 - TOP = WHY_*; no retval below it
2576 - (TOP, SECOND, THIRD) = exc_info()
2577 Below them is EXIT, the context.__exit__ bound method.
2578 In the last case, we must call
2579 EXIT(TOP, SECOND, THIRD)
2580 otherwise we must call
2581 EXIT(None, None, None)
2583 In all cases, we remove EXIT from the stack, leaving
2584 the rest in the same order.
2586 In addition, if the stack represents an exception,
2587 *and* the function call returns a 'true' value, we
2588 "zap" this information, to prevent END_FINALLY from
2589 re-raising the exception. (But non-local gotos
2590 should still be resumed.)
2593 PyObject
*exit_func
;
2601 else if (PyInt_Check(u
)) {
2602 switch(PyInt_AS_LONG(u
)) {
2605 /* Retval in TOP. */
2606 exit_func
= SECOND();
2615 u
= v
= w
= Py_None
;
2620 exit_func
= THIRD();
2625 /* XXX Not the fastest way to call it... */
2626 x
= PyObject_CallFunctionObjArgs(exit_func
, u
, v
, w
,
2628 Py_DECREF(exit_func
);
2630 break; /* Go to error exit */
2633 err
= PyObject_IsTrue(x
);
2639 break; /* Go to error exit */
2642 /* There was an exception and a true return */
2650 /* The stack was rearranged to remove EXIT
2651 above. Let END_FINALLY do its thing */
2653 PREDICT(END_FINALLY
);
2663 x
= call_function(&sp
, oparg
, &intr0
, &intr1
);
2665 x
= call_function(&sp
, oparg
);
2674 case CALL_FUNCTION_VAR
:
2675 case CALL_FUNCTION_KW
:
2676 case CALL_FUNCTION_VAR_KW
:
2678 int na
= oparg
& 0xff;
2679 int nk
= (oparg
>>8) & 0xff;
2680 int flags
= (opcode
- CALL_FUNCTION
) & 3;
2681 int n
= na
+ 2 * nk
;
2682 PyObject
**pfunc
, *func
, **sp
;
2684 if (flags
& CALL_FLAG_VAR
)
2686 if (flags
& CALL_FLAG_KW
)
2688 pfunc
= stack_pointer
- n
- 1;
2691 if (PyMethod_Check(func
)
2692 && PyMethod_GET_SELF(func
) != NULL
) {
2693 PyObject
*self
= PyMethod_GET_SELF(func
);
2695 func
= PyMethod_GET_FUNCTION(func
);
2703 READ_TIMESTAMP(intr0
);
2704 x
= ext_do_call(func
, &sp
, flags
, na
, nk
);
2705 READ_TIMESTAMP(intr1
);
2709 while (stack_pointer
> pfunc
) {
2720 v
= POP(); /* code object */
2721 x
= PyFunction_New(v
, f
->f_globals
);
2723 /* XXX Maybe this should be a separate opcode? */
2724 if (x
!= NULL
&& oparg
> 0) {
2725 v
= PyTuple_New(oparg
);
2731 while (--oparg
>= 0) {
2733 PyTuple_SET_ITEM(v
, oparg
, w
);
2735 err
= PyFunction_SetDefaults(x
, v
);
2743 v
= POP(); /* code object */
2744 x
= PyFunction_New(v
, f
->f_globals
);
2748 if (PyFunction_SetClosure(x
, v
) != 0) {
2749 /* Can't happen unless bytecode is corrupt. */
2750 why
= WHY_EXCEPTION
;
2754 if (x
!= NULL
&& oparg
> 0) {
2755 v
= PyTuple_New(oparg
);
2761 while (--oparg
>= 0) {
2763 PyTuple_SET_ITEM(v
, oparg
, w
);
2765 if (PyFunction_SetDefaults(x
, v
) != 0) {
2766 /* Can't happen unless
2767 PyFunction_SetDefaults changes. */
2768 why
= WHY_EXCEPTION
;
2783 x
= PySlice_New(u
, v
, w
);
2788 if (x
!= NULL
) continue;
2793 oparg
= oparg
<<16 | NEXTARG();
2794 goto dispatch_opcode
;
2798 "XXX lineno: %d, opcode: %d\n",
2799 PyFrame_GetLineNumber(f
),
2801 PyErr_SetString(PyExc_SystemError
, "unknown opcode");
2802 why
= WHY_EXCEPTION
;
2813 READ_TIMESTAMP(inst1
);
2815 /* Quickly continue if no error occurred */
2817 if (why
== WHY_NOT
) {
2818 if (err
== 0 && x
!= NULL
) {
2820 /* This check is expensive! */
2821 if (PyErr_Occurred())
2823 "XXX undetected error\n");
2826 READ_TIMESTAMP(loop1
);
2827 continue; /* Normal, fast path */
2832 why
= WHY_EXCEPTION
;
2837 /* Double-check exception status */
2839 if (why
== WHY_EXCEPTION
|| why
== WHY_RERAISE
) {
2840 if (!PyErr_Occurred()) {
2841 PyErr_SetString(PyExc_SystemError
,
2842 "error return without exception set");
2843 why
= WHY_EXCEPTION
;
2848 /* This check is expensive! */
2849 if (PyErr_Occurred()) {
2851 sprintf(buf
, "Stack unwind with exception "
2852 "set and why=%d", why
);
2858 /* Log traceback info if this is a real exception */
2860 if (why
== WHY_EXCEPTION
) {
2861 PyTraceBack_Here(f
);
2863 if (tstate
->c_tracefunc
!= NULL
)
2864 call_exc_trace(tstate
->c_tracefunc
,
2865 tstate
->c_traceobj
, f
);
2868 /* For the rest, treat WHY_RERAISE as WHY_EXCEPTION */
2870 if (why
== WHY_RERAISE
)
2871 why
= WHY_EXCEPTION
;
2873 /* Unwind stacks if a (pseudo) exception occurred */
2876 while (why
!= WHY_NOT
&& f
->f_iblock
> 0) {
2877 /* Peek at the current block. */
2878 PyTryBlock
*b
= &f
->f_blockstack
[f
->f_iblock
- 1];
2880 assert(why
!= WHY_YIELD
);
2881 if (b
->b_type
== SETUP_LOOP
&& why
== WHY_CONTINUE
) {
2883 JUMPTO(PyInt_AS_LONG(retval
));
2888 /* Now we have to pop the block. */
2891 while (STACK_LEVEL() > b
->b_level
) {
2895 if (b
->b_type
== SETUP_LOOP
&& why
== WHY_BREAK
) {
2897 JUMPTO(b
->b_handler
);
2900 if (b
->b_type
== SETUP_FINALLY
||
2901 (b
->b_type
== SETUP_EXCEPT
&&
2902 why
== WHY_EXCEPTION
) ||
2903 b
->b_type
== SETUP_WITH
) {
2904 if (why
== WHY_EXCEPTION
) {
2905 PyObject
*exc
, *val
, *tb
;
2906 PyErr_Fetch(&exc
, &val
, &tb
);
2911 /* Make the raw exception data
2912 available to the handler,
2913 so a program can emulate the
2914 Python main loop. Don't do
2915 this for 'finally'. */
2916 if (b
->b_type
== SETUP_EXCEPT
||
2917 b
->b_type
== SETUP_WITH
) {
2918 PyErr_NormalizeException(
2920 set_exc_info(tstate
,
2932 if (why
& (WHY_RETURN
| WHY_CONTINUE
))
2934 v
= PyInt_FromLong((long)why
);
2938 JUMPTO(b
->b_handler
);
2941 } /* unwind stack */
2943 /* End the loop if we still have an error (or return) */
2947 READ_TIMESTAMP(loop1
);
2951 assert(why
!= WHY_YIELD
);
2952 /* Pop remaining stack entries. */
2958 if (why
!= WHY_RETURN
)
2962 if (tstate
->use_tracing
) {
2963 if (tstate
->c_tracefunc
) {
2964 if (why
== WHY_RETURN
|| why
== WHY_YIELD
) {
2965 if (call_trace(tstate
->c_tracefunc
,
2966 tstate
->c_traceobj
, f
,
2967 PyTrace_RETURN
, retval
)) {
2970 why
= WHY_EXCEPTION
;
2973 else if (why
== WHY_EXCEPTION
) {
2974 call_trace_protected(tstate
->c_tracefunc
,
2975 tstate
->c_traceobj
, f
,
2976 PyTrace_RETURN
, NULL
);
2979 if (tstate
->c_profilefunc
) {
2980 if (why
== WHY_EXCEPTION
)
2981 call_trace_protected(tstate
->c_profilefunc
,
2982 tstate
->c_profileobj
, f
,
2983 PyTrace_RETURN
, NULL
);
2984 else if (call_trace(tstate
->c_profilefunc
,
2985 tstate
->c_profileobj
, f
,
2986 PyTrace_RETURN
, retval
)) {
2989 why
= WHY_EXCEPTION
;
2994 if (tstate
->frame
->f_exc_type
!= NULL
)
2995 reset_exc_info(tstate
);
2997 assert(tstate
->frame
->f_exc_value
== NULL
);
2998 assert(tstate
->frame
->f_exc_traceback
== NULL
);
3003 Py_LeaveRecursiveCall();
3004 tstate
->frame
= f
->f_back
;
3009 /* This is gonna seem *real weird*, but if you put some other code between
3010 PyEval_EvalFrame() and PyEval_EvalCodeEx() you will need to adjust
3011 the test in the if statements in Misc/gdbinit (pystack and pystackv). */
3014 PyEval_EvalCodeEx(PyCodeObject
*co
, PyObject
*globals
, PyObject
*locals
,
3015 PyObject
**args
, int argcount
, PyObject
**kws
, int kwcount
,
3016 PyObject
**defs
, int defcount
, PyObject
*closure
)
3018 register PyFrameObject
*f
;
3019 register PyObject
*retval
= NULL
;
3020 register PyObject
**fastlocals
, **freevars
;
3021 PyThreadState
*tstate
= PyThreadState_GET();
3024 if (globals
== NULL
) {
3025 PyErr_SetString(PyExc_SystemError
,
3026 "PyEval_EvalCodeEx: NULL globals");
3030 assert(tstate
!= NULL
);
3031 assert(globals
!= NULL
);
3032 f
= PyFrame_New(tstate
, co
, globals
, locals
);
3036 fastlocals
= f
->f_localsplus
;
3037 freevars
= f
->f_localsplus
+ co
->co_nlocals
;
3039 if (co
->co_argcount
> 0 ||
3040 co
->co_flags
& (CO_VARARGS
| CO_VARKEYWORDS
)) {
3043 PyObject
*kwdict
= NULL
;
3044 if (co
->co_flags
& CO_VARKEYWORDS
) {
3045 kwdict
= PyDict_New();
3048 i
= co
->co_argcount
;
3049 if (co
->co_flags
& CO_VARARGS
)
3051 SETLOCAL(i
, kwdict
);
3053 if (argcount
> co
->co_argcount
) {
3054 if (!(co
->co_flags
& CO_VARARGS
)) {
3055 PyErr_Format(PyExc_TypeError
,
3056 "%.200s() takes %s %d "
3057 "argument%s (%d given)",
3058 PyString_AsString(co
->co_name
),
3059 defcount
? "at most" : "exactly",
3061 co
->co_argcount
== 1 ? "" : "s",
3062 argcount
+ kwcount
);
3065 n
= co
->co_argcount
;
3067 for (i
= 0; i
< n
; i
++) {
3072 if (co
->co_flags
& CO_VARARGS
) {
3073 u
= PyTuple_New(argcount
- n
);
3076 SETLOCAL(co
->co_argcount
, u
);
3077 for (i
= n
; i
< argcount
; i
++) {
3080 PyTuple_SET_ITEM(u
, i
-n
, x
);
3083 for (i
= 0; i
< kwcount
; i
++) {
3084 PyObject
**co_varnames
;
3085 PyObject
*keyword
= kws
[2*i
];
3086 PyObject
*value
= kws
[2*i
+ 1];
3088 if (keyword
== NULL
|| !(PyString_Check(keyword
)
3089 #ifdef Py_USING_UNICODE
3090 || PyUnicode_Check(keyword
)
3093 PyErr_Format(PyExc_TypeError
,
3094 "%.200s() keywords must be strings",
3095 PyString_AsString(co
->co_name
));
3098 /* Speed hack: do raw pointer compares. As names are
3099 normally interned this should almost always hit. */
3100 co_varnames
= ((PyTupleObject
*)(co
->co_varnames
))->ob_item
;
3101 for (j
= 0; j
< co
->co_argcount
; j
++) {
3102 PyObject
*nm
= co_varnames
[j
];
3106 /* Slow fallback, just in case */
3107 for (j
= 0; j
< co
->co_argcount
; j
++) {
3108 PyObject
*nm
= co_varnames
[j
];
3109 int cmp
= PyObject_RichCompareBool(
3110 keyword
, nm
, Py_EQ
);
            if (j >= co->co_argcount) {
                if (kwdict == NULL) {
                    PyObject *kwd_str = kwd_as_string(keyword);
                    if (kwd_str) {
                        PyErr_Format(PyExc_TypeError,
                                     "%.200s() got an unexpected "
                                     "keyword argument '%.400s'",
                                     PyString_AsString(co->co_name),
                                     PyString_AsString(kwd_str));
                        Py_DECREF(kwd_str);
                    }
                    goto fail;
                }
                PyDict_SetItem(kwdict, keyword, value);
                continue;
            }
        kw_found:
            if (GETLOCAL(j) != NULL) {
                PyObject *kwd_str = kwd_as_string(keyword);
                if (kwd_str) {
                    PyErr_Format(PyExc_TypeError,
                                 "%.200s() got multiple "
                                 "values for keyword "
                                 "argument '%.400s'",
                                 PyString_AsString(co->co_name),
                                 PyString_AsString(kwd_str));
                    Py_DECREF(kwd_str);
                }
                goto fail;
            }
            Py_INCREF(value);
            SETLOCAL(j, value);
        }
        if (argcount < co->co_argcount) {
            int m = co->co_argcount - defcount;
            for (i = argcount; i < m; i++) {
                if (GETLOCAL(i) == NULL) {
                    int j, given = 0;
                    for (j = 0; j < co->co_argcount; j++)
                        if (GETLOCAL(j))
                            given++;
                    PyErr_Format(PyExc_TypeError,
                                 "%.200s() takes %s %d "
                                 "argument%s (%d given)",
                                 PyString_AsString(co->co_name),
                                 ((co->co_flags & CO_VARARGS) ||
                                  defcount) ? "at least" : "exactly",
                                 m, m == 1 ? "" : "s", given);
                    goto fail;
                }
            }
            if (n > m)
                i = n - m;
            else
                i = 0;
            for (; i < defcount; i++) {
                if (GETLOCAL(m+i) == NULL) {
                    PyObject *def = defs[i];
                    Py_INCREF(def);
                    SETLOCAL(m+i, def);
                }
            }
        }
    }
    else if (argcount > 0 || kwcount > 0) {
        PyErr_Format(PyExc_TypeError,
                     "%.200s() takes no arguments (%d given)",
                     PyString_AsString(co->co_name),
                     argcount + kwcount);
        goto fail;
    }
    /* Allocate and initialize storage for cell vars, and copy free
       vars into frame.  This isn't too efficient right now. */
    if (PyTuple_GET_SIZE(co->co_cellvars)) {
        int i, j, nargs, found;
        char *cellname, *argname;
        PyObject *c;

        nargs = co->co_argcount;
        if (co->co_flags & CO_VARARGS)
            nargs++;
        if (co->co_flags & CO_VARKEYWORDS)
            nargs++;

        /* Initialize each cell var, taking into account
           cell vars that are initialized from arguments.

           Should arrange for the compiler to put cellvars
           that are arguments at the beginning of the cellvars
           list so that we can march over it more efficiently?
        */
        for (i = 0; i < PyTuple_GET_SIZE(co->co_cellvars); ++i) {
            cellname = PyString_AS_STRING(
                PyTuple_GET_ITEM(co->co_cellvars, i));
            found = 0;
            for (j = 0; j < nargs; j++) {
                argname = PyString_AS_STRING(
                    PyTuple_GET_ITEM(co->co_varnames, j));
                if (strcmp(cellname, argname) == 0) {
                    c = PyCell_New(GETLOCAL(j));
                    GETLOCAL(co->co_nlocals + i) = c;
                    found = 1;
                    break;
                }
            }
            if (found == 0) {
                c = PyCell_New(NULL);
                SETLOCAL(co->co_nlocals + i, c);
            }
        }
    }
    if (PyTuple_GET_SIZE(co->co_freevars)) {
        int i;
        for (i = 0; i < PyTuple_GET_SIZE(co->co_freevars); ++i) {
            PyObject *o = PyTuple_GET_ITEM(closure, i);
            Py_INCREF(o);
            freevars[PyTuple_GET_SIZE(co->co_cellvars) + i] = o;
        }
    }

    if (co->co_flags & CO_GENERATOR) {
        /* Don't need to keep the reference to f_back, it will be set
         * when the generator is resumed. */
        Py_XDECREF(f->f_back);
        f->f_back = NULL;

        PCALL(PCALL_GENERATOR);

        /* Create a new generator that owns the ready to run frame
         * and return that as the value. */
        return PyGen_New(f);
    }
    retval = PyEval_EvalFrameEx(f,0);

fail: /* Jump here from prelude on failure */

    /* decref'ing the frame can cause __del__ methods to get invoked,
       which can call back into Python.  While we're done with the
       current Python frame (f), the associated C stack is still in use,
       so recursion_depth must be boosted for the duration.
    */
    assert(tstate != NULL);
    ++tstate->recursion_depth;
    Py_DECREF(f);
    --tstate->recursion_depth;
    return retval;
}
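
/* Usage sketch (not part of ceval.c): from C, the usual route into
   PyEval_EvalCodeEx() is the PyEval_EvalCode() wrapper, after compiling
   source with Py_CompileString().  The helper name run_snippet and the
   namespace handling below are illustrative only. */
#if 0
static PyObject *
run_snippet(const char *source)
{
    PyObject *globals, *result;
    PyCodeObject *code;

    code = (PyCodeObject *)Py_CompileString(source, "<snippet>", Py_file_input);
    if (code == NULL)
        return NULL;
    globals = PyDict_New();
    if (globals == NULL) {
        Py_DECREF(code);
        return NULL;
    }
    PyDict_SetItemString(globals, "__builtins__", PyEval_GetBuiltins());
    result = PyEval_EvalCode(code, globals, globals);  /* funnels into PyEval_EvalCodeEx() */
    Py_DECREF(globals);
    Py_DECREF(code);
    return result;
}
#endif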
static PyObject *
special_lookup(PyObject *o, char *meth, PyObject **cache)
{
    PyObject *res;
    if (PyInstance_Check(o)) {
        if (!*cache)
            return PyObject_GetAttrString(o, meth);
        else
            return PyObject_GetAttr(o, *cache);
    }
    res = _PyObject_LookupSpecial(o, meth, cache);
    if (res == NULL && !PyErr_Occurred()) {
        PyErr_SetObject(PyExc_AttributeError, *cache);
        return NULL;
    }
    return res;
}

static PyObject *
kwd_as_string(PyObject *kwd) {
#ifdef Py_USING_UNICODE
    if (PyString_Check(kwd)) {
#else
        assert(PyString_Check(kwd));
#endif
        Py_INCREF(kwd);
        return kwd;
#ifdef Py_USING_UNICODE
    }
    return _PyUnicode_AsDefaultEncodedString(kwd, "replace");
#endif
}
/* Implementation notes for set_exc_info() and reset_exc_info():

- Below, 'exc_ZZZ' stands for 'exc_type', 'exc_value' and
  'exc_traceback'.  These always travel together.

- tstate->curexc_ZZZ is the "hot" exception that is set by
  PyErr_SetString(), cleared by PyErr_Clear(), and so on.

- Once an exception is caught by an except clause, it is transferred
  from tstate->curexc_ZZZ to tstate->exc_ZZZ, from which sys.exc_info()
  can pick it up.  This is the primary task of set_exc_info().
  XXX That can't be right: set_exc_info() doesn't look at tstate->curexc_ZZZ.

- Now let me explain the complicated dance with frame->f_exc_ZZZ.

  Long ago, when none of this existed, there were just a few globals:
  one set corresponding to the "hot" exception, and one set
  corresponding to sys.exc_ZZZ.  (Actually, the latter weren't C
  globals; they were simply stored as sys.exc_ZZZ.  For backwards
  compatibility, they still are!)  The problem was that in code like

     try:
        "something that may fail"
     except "some exception":
        "do something else first"
        "print the exception from sys.exc_ZZZ."

  if "do something else first" invoked something that raised and caught
  an exception, sys.exc_ZZZ were overwritten.  That was a frequent
  cause of subtle bugs.  I fixed this by changing the semantics as
  follows:

    - Within one frame, sys.exc_ZZZ will hold the last exception caught
      *in that frame*.

    - But initially, and as long as no exception is caught in a given
      frame, sys.exc_ZZZ will hold the last exception caught in the
      previous frame (or the frame before that, etc.).

  The first bullet fixed the bug in the above example.  The second
  bullet was for backwards compatibility: it was (and is) common to
  have a function that is called when an exception is caught, and to
  have that function access the caught exception via sys.exc_ZZZ.
  (Example: traceback.print_exc()).

  At the same time I fixed the problem that sys.exc_ZZZ weren't
  thread-safe, by introducing sys.exc_info() which gets it from tstate;
  but that's really a separate improvement.

  The reset_exc_info() function in ceval.c restores the tstate->exc_ZZZ
  variables to what they were before the current frame was called.  The
  set_exc_info() function saves them on the frame so that
  reset_exc_info() can restore them.  The invariant is that
  frame->f_exc_ZZZ is NULL iff the current frame never caught an
  exception (where "catching" an exception applies only to successful
  except clauses); and if the current frame ever caught an exception,
  frame->f_exc_ZZZ is the exception that was stored in tstate->exc_ZZZ
  at the start of the current frame.
*/
static void
set_exc_info(PyThreadState *tstate,
             PyObject *type, PyObject *value, PyObject *tb)
{
    PyFrameObject *frame = tstate->frame;
    PyObject *tmp_type, *tmp_value, *tmp_tb;

    assert(type != NULL);
    assert(frame != NULL);
    if (frame->f_exc_type == NULL) {
        assert(frame->f_exc_value == NULL);
        assert(frame->f_exc_traceback == NULL);
        /* This frame didn't catch an exception before. */
        /* Save previous exception of this thread in this frame. */
        if (tstate->exc_type == NULL) {
            /* XXX Why is this set to Py_None? */
            Py_INCREF(Py_None);
            tstate->exc_type = Py_None;
        }
        Py_INCREF(tstate->exc_type);
        Py_XINCREF(tstate->exc_value);
        Py_XINCREF(tstate->exc_traceback);
        frame->f_exc_type = tstate->exc_type;
        frame->f_exc_value = tstate->exc_value;
        frame->f_exc_traceback = tstate->exc_traceback;
    }
    /* Set new exception for this thread. */
    tmp_type = tstate->exc_type;
    tmp_value = tstate->exc_value;
    tmp_tb = tstate->exc_traceback;
    Py_INCREF(type);
    Py_INCREF(value);
    Py_XINCREF(tb);
    tstate->exc_type = type;
    tstate->exc_value = value;
    tstate->exc_traceback = tb;
    Py_XDECREF(tmp_type);
    Py_XDECREF(tmp_value);
    Py_XDECREF(tmp_tb);
    /* For b/w compatibility */
    PySys_SetObject("exc_type", type);
    PySys_SetObject("exc_value", value);
    PySys_SetObject("exc_traceback", tb);
}
static void
reset_exc_info(PyThreadState *tstate)
{
    PyFrameObject *frame;
    PyObject *tmp_type, *tmp_value, *tmp_tb;

    /* It's a precondition that the thread state's frame caught an
     * exception -- verify in a debug build.
     */
    assert(tstate != NULL);
    frame = tstate->frame;
    assert(frame != NULL);
    assert(frame->f_exc_type != NULL);

    /* Copy the frame's exception info back to the thread state. */
    tmp_type = tstate->exc_type;
    tmp_value = tstate->exc_value;
    tmp_tb = tstate->exc_traceback;
    Py_INCREF(frame->f_exc_type);
    Py_XINCREF(frame->f_exc_value);
    Py_XINCREF(frame->f_exc_traceback);
    tstate->exc_type = frame->f_exc_type;
    tstate->exc_value = frame->f_exc_value;
    tstate->exc_traceback = frame->f_exc_traceback;
    Py_XDECREF(tmp_type);
    Py_XDECREF(tmp_value);
    Py_XDECREF(tmp_tb);

    /* For b/w compatibility */
    PySys_SetObject("exc_type", frame->f_exc_type);
    PySys_SetObject("exc_value", frame->f_exc_value);
    PySys_SetObject("exc_traceback", frame->f_exc_traceback);

    /* Clear the frame's exception info. */
    tmp_type = frame->f_exc_type;
    tmp_value = frame->f_exc_value;
    tmp_tb = frame->f_exc_traceback;
    frame->f_exc_type = NULL;
    frame->f_exc_value = NULL;
    frame->f_exc_traceback = NULL;
    Py_DECREF(tmp_type);
    Py_XDECREF(tmp_value);
    Py_XDECREF(tmp_tb);
}
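
/* Illustration (not part of ceval.c): the "hot" tstate->curexc_ZZZ
   triple mentioned in the notes above is what the public PyErr_Fetch()
   and PyErr_Restore() move around; the exc_ZZZ side is only touched by
   set_exc_info()/reset_exc_info().  A minimal sketch using only those
   documented APIs; the helper name peek_current_exception is made up. */
#if 0
static void
peek_current_exception(void)
{
    PyObject *type, *value, *tb;

    PyErr_Fetch(&type, &value, &tb);    /* clears tstate->curexc_ZZZ */
    if (type == NULL)
        return;                         /* no exception was pending */
    /* ... inspect type/value/tb here ... */
    PyErr_Restore(type, value, tb);     /* put it back; steals the refs */
}
#endif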
/* Logic for the raise statement (too complicated for inlining).
   This *consumes* a reference count to each of its arguments. */
static enum why_code
do_raise(PyObject *type, PyObject *value, PyObject *tb)
{
    if (type == NULL) {
        /* Reraise */
        PyThreadState *tstate = PyThreadState_GET();
        type = tstate->exc_type == NULL ? Py_None : tstate->exc_type;
        value = tstate->exc_value;
        tb = tstate->exc_traceback;
        Py_XINCREF(type);
        Py_XINCREF(value);
        Py_XINCREF(tb);
    }

    /* We support the following forms of raise:
       raise <class>, <classinstance>
       raise <class>, <argument tuple>
       raise <class>, None
       raise <class>, <argument>
       raise <classinstance>, None
       raise <string>, <object>
       raise <string>, None

       An omitted second argument is the same as None.

       In addition, raise <tuple>, <anything> is the same as
       raising the tuple's first item (and it better have one!);
       this rule is applied recursively.

       Finally, an optional third argument can be supplied, which
       gives the traceback to be substituted (useful when
       re-raising an exception after examining it).  */

    /* First, check the traceback argument, replacing None with
       NULL. */
    if (tb == Py_None) {
        Py_DECREF(tb);
        tb = NULL;
    }
    else if (tb != NULL && !PyTraceBack_Check(tb)) {
        PyErr_SetString(PyExc_TypeError,
                        "raise: arg 3 must be a traceback or None");
        goto raise_error;
    }

    /* Next, replace a missing value with None */
    if (value == NULL) {
        value = Py_None;
        Py_INCREF(value);
    }

    /* Next, repeatedly, replace a tuple exception with its first item */
    while (PyTuple_Check(type) && PyTuple_Size(type) > 0) {
        PyObject *tmp = type;
        type = PyTuple_GET_ITEM(type, 0);
        Py_INCREF(type);
        Py_DECREF(tmp);
    }

    if (PyExceptionClass_Check(type))
        PyErr_NormalizeException(&type, &value, &tb);
    else if (PyExceptionInstance_Check(type)) {
        /* Raising an instance.  The value should be a dummy. */
        if (value != Py_None) {
            PyErr_SetString(PyExc_TypeError,
                            "instance exception may not have a separate value");
            goto raise_error;
        }
        else {
            /* Normalize to raise <class>, <instance> */
            Py_DECREF(value);
            value = type;
            type = PyExceptionInstance_Class(type);
            Py_INCREF(type);
        }
    }
    else {
        /* Not something you can raise.  You get an exception
           anyway, just not what you specified :-) */
        PyErr_Format(PyExc_TypeError,
                     "exceptions must be old-style classes or "
                     "derived from BaseException, not %s",
                     type->ob_type->tp_name);
        goto raise_error;
    }

    assert(PyExceptionClass_Check(type));
    if (Py_Py3kWarningFlag && PyClass_Check(type)) {
        if (PyErr_WarnEx(PyExc_DeprecationWarning,
                         "exceptions must derive from BaseException "
                         "in 3.x", 1) < 0)
            goto raise_error;
    }

    PyErr_Restore(type, value, tb);
    if (tb == NULL)
        return WHY_EXCEPTION;
    else
        return WHY_RERAISE;

 raise_error:
    Py_XDECREF(value);
    Py_XDECREF(type);
    Py_XDECREF(tb);
    return WHY_EXCEPTION;
}
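
/* Illustration (not part of ceval.c): at the C level, the same
   normalize-then-set dance that do_raise() performs for the raise
   statement can be done with public APIs.  A minimal sketch, assuming
   exc is an exception class and val its (possibly unnormalized)
   argument; the helper name set_normalized is made up. */
#if 0
static void
set_normalized(PyObject *exc, PyObject *val, PyObject *tb)
{
    Py_XINCREF(exc);
    Py_XINCREF(val);
    Py_XINCREF(tb);
    PyErr_NormalizeException(&exc, &val, &tb);  /* turn class+arg into an instance */
    PyErr_Restore(exc, val, tb);                /* set the pending exception; steals refs */
}
#endif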
/* Iterate v argcnt times and store the results on the stack (via decreasing
   sp).  Return 1 for success, 0 if error. */

static int
unpack_iterable(PyObject *v, int argcnt, PyObject **sp)
{
    int i = 0;
    PyObject *it;  /* iter(v) */
    PyObject *w;

    it = PyObject_GetIter(v);
    if (it == NULL)
        goto Error;

    for (; i < argcnt; i++) {
        w = PyIter_Next(it);
        if (w == NULL) {
            /* Iterator done, via error or exhaustion. */
            if (!PyErr_Occurred()) {
                PyErr_Format(PyExc_ValueError,
                             "need more than %d value%s to unpack",
                             i, i == 1 ? "" : "s");
            }
            goto Error;
        }
        *--sp = w;
    }

    /* We better have exhausted the iterator now. */
    w = PyIter_Next(it);
    if (w == NULL) {
        if (PyErr_Occurred())
            goto Error;
        Py_DECREF(it);
        return 1;
    }
    Py_DECREF(w);
    PyErr_SetString(PyExc_ValueError, "too many values to unpack");
    /* fall through */
Error:
    for (; i > 0; i--, sp++)
        Py_DECREF(*sp);
    Py_XDECREF(it);
    return 0;
}

static int
prtrace(PyObject *v, char *str)
{
    printf("%s ", str);
    if (PyObject_Print(v, stdout, 0) != 0)
        PyErr_Clear(); /* Don't know what else to do */
    printf("\n");
    return 1;
}
static void
call_exc_trace(Py_tracefunc func, PyObject *self, PyFrameObject *f)
{
    PyObject *type, *value, *traceback, *arg;
    int err;
    PyErr_Fetch(&type, &value, &traceback);
    if (value == NULL) {
        value = Py_None;
        Py_INCREF(value);
    }
    arg = PyTuple_Pack(3, type, value, traceback);
    if (arg == NULL) {
        PyErr_Restore(type, value, traceback);
        return;
    }
    err = call_trace(func, self, f, PyTrace_EXCEPTION, arg);
    Py_DECREF(arg);
    if (err == 0)
        PyErr_Restore(type, value, traceback);
    else {
        Py_XDECREF(type);
        Py_XDECREF(value);
        Py_XDECREF(traceback);
    }
}

static int
call_trace_protected(Py_tracefunc func, PyObject *obj, PyFrameObject *frame,
                     int what, PyObject *arg)
{
    PyObject *type, *value, *traceback;
    int err;
    PyErr_Fetch(&type, &value, &traceback);
    err = call_trace(func, obj, frame, what, arg);
    if (err == 0) {
        PyErr_Restore(type, value, traceback);
        return 0;
    }
    else {
        Py_XDECREF(type);
        Py_XDECREF(value);
        Py_XDECREF(traceback);
        return -1;
    }
}

static int
call_trace(Py_tracefunc func, PyObject *obj, PyFrameObject *frame,
           int what, PyObject *arg)
{
    register PyThreadState *tstate = frame->f_tstate;
    int result;
    if (tstate->tracing)
        return 0;
    tstate->tracing++;
    tstate->use_tracing = 0;
    result = func(obj, frame, what, arg);
    tstate->use_tracing = ((tstate->c_tracefunc != NULL)
                           || (tstate->c_profilefunc != NULL));
    tstate->tracing--;
    return result;
}

PyObject *
_PyEval_CallTracing(PyObject *func, PyObject *args)
{
    PyFrameObject *frame = PyEval_GetFrame();
    PyThreadState *tstate = frame->f_tstate;
    int save_tracing = tstate->tracing;
    int save_use_tracing = tstate->use_tracing;
    PyObject *result;

    tstate->tracing = 0;
    tstate->use_tracing = ((tstate->c_tracefunc != NULL)
                           || (tstate->c_profilefunc != NULL));
    result = PyObject_Call(func, args, NULL);
    tstate->tracing = save_tracing;
    tstate->use_tracing = save_use_tracing;
    return result;
}
/* See Objects/lnotab_notes.txt for a description of how tracing works. */
static int
maybe_call_line_trace(Py_tracefunc func, PyObject *obj,
                      PyFrameObject *frame, int *instr_lb, int *instr_ub,
                      int *instr_prev)
{
    int result = 0;
    int line = frame->f_lineno;

    /* If the last instruction executed isn't in the current
       instruction window, reset the window.
    */
    if (frame->f_lasti < *instr_lb || frame->f_lasti >= *instr_ub) {
        PyAddrPair bounds;
        line = _PyCode_CheckLineNumber(frame->f_code, frame->f_lasti,
                                       &bounds);
        *instr_lb = bounds.ap_lower;
        *instr_ub = bounds.ap_upper;
    }
    /* If the last instruction falls at the start of a line or if
       it represents a jump backwards, update the frame's line
       number and call the trace function. */
    if (frame->f_lasti == *instr_lb || frame->f_lasti < *instr_prev) {
        frame->f_lineno = line;
        result = call_trace(func, obj, frame, PyTrace_LINE, Py_None);
    }
    *instr_prev = frame->f_lasti;
    return result;
}

void
PyEval_SetProfile(Py_tracefunc func, PyObject *arg)
{
    PyThreadState *tstate = PyThreadState_GET();
    PyObject *temp = tstate->c_profileobj;
    Py_XINCREF(arg);
    tstate->c_profilefunc = NULL;
    tstate->c_profileobj = NULL;
    /* Must make sure that tracing is not ignored if 'temp' is freed */
    tstate->use_tracing = tstate->c_tracefunc != NULL;
    Py_XDECREF(temp);
    tstate->c_profilefunc = func;
    tstate->c_profileobj = arg;
    /* Flag that tracing or profiling is turned on */
    tstate->use_tracing = (func != NULL) || (tstate->c_tracefunc != NULL);
}
void
PyEval_SetTrace(Py_tracefunc func, PyObject *arg)
{
    PyThreadState *tstate = PyThreadState_GET();
    PyObject *temp = tstate->c_traceobj;
    _Py_TracingPossible += (func != NULL) - (tstate->c_tracefunc != NULL);
    Py_XINCREF(arg);
    tstate->c_tracefunc = NULL;
    tstate->c_traceobj = NULL;
    /* Must make sure that profiling is not ignored if 'temp' is freed */
    tstate->use_tracing = tstate->c_profilefunc != NULL;
    Py_XDECREF(temp);
    tstate->c_tracefunc = func;
    tstate->c_traceobj = arg;
    /* Flag that tracing or profiling is turned on */
    tstate->use_tracing = ((func != NULL)
                           || (tstate->c_profilefunc != NULL));
}
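
/* Usage sketch (not part of ceval.c): installing a profile hook from an
   extension via PyEval_SetProfile().  The Py_tracefunc below just counts
   events; the names count_events and n_events are made up. */
#if 0
static Py_ssize_t n_events = 0;

static int
count_events(PyObject *obj, PyFrameObject *frame, int what, PyObject *arg)
{
    /* what is PyTrace_CALL, PyTrace_RETURN, PyTrace_C_CALL, ... */
    n_events++;
    return 0;                       /* non-zero would abort the traced call */
}

static void
install_profiler(void)
{
    PyEval_SetProfile(count_events, NULL);   /* arg may be any object or NULL */
}
#endif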
PyObject *
PyEval_GetBuiltins(void)
{
    PyFrameObject *current_frame = PyEval_GetFrame();
    if (current_frame == NULL)
        return PyThreadState_GET()->interp->builtins;
    else
        return current_frame->f_builtins;
}

PyObject *
PyEval_GetLocals(void)
{
    PyFrameObject *current_frame = PyEval_GetFrame();
    if (current_frame == NULL)
        return NULL;
    PyFrame_FastToLocals(current_frame);
    return current_frame->f_locals;
}

PyObject *
PyEval_GetGlobals(void)
{
    PyFrameObject *current_frame = PyEval_GetFrame();
    if (current_frame == NULL)
        return NULL;
    else
        return current_frame->f_globals;
}

PyFrameObject *
PyEval_GetFrame(void)
{
    PyThreadState *tstate = PyThreadState_GET();
    return _PyThreadState_GetFrame(tstate);
}
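
/* Usage sketch (not part of ceval.c): these getters let extension code
   inspect the namespaces of whatever Python frame is currently running.
   They return borrowed references (or NULL when no frame is active).
   Assumes dict namespaces; the helper name current_name is made up. */
#if 0
static PyObject *
current_name(const char *name)
{
    PyObject *v = NULL;
    PyObject *locals = PyEval_GetLocals();      /* borrowed, may be NULL */
    PyObject *globals = PyEval_GetGlobals();    /* borrowed, may be NULL */

    if (locals != NULL)
        v = PyDict_GetItemString(locals, name); /* borrowed, may be NULL */
    if (v == NULL && globals != NULL)
        v = PyDict_GetItemString(globals, name);
    return v;                                   /* borrowed or NULL */
}
#endif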
int
PyEval_GetRestricted(void)
{
    PyFrameObject *current_frame = PyEval_GetFrame();
    return current_frame == NULL ? 0 : PyFrame_IsRestricted(current_frame);
}

int
PyEval_MergeCompilerFlags(PyCompilerFlags *cf)
{
    PyFrameObject *current_frame = PyEval_GetFrame();
    int result = cf->cf_flags != 0;

    if (current_frame != NULL) {
        const int codeflags = current_frame->f_code->co_flags;
        const int compilerflags = codeflags & PyCF_MASK;
        if (compilerflags) {
            result = 1;
            cf->cf_flags |= compilerflags;
        }
#if 0 /* future keyword */
        if (codeflags & CO_GENERATOR_ALLOWED) {
            result = 1;
            cf->cf_flags |= CO_GENERATOR_ALLOWED;
        }
#endif
    }
    return result;
}

int
Py_FlushLine(void)
{
    PyObject *f = PySys_GetObject("stdout");
    if (f == NULL)
        return 0;
    if (!PyFile_SoftSpace(f, 0))
        return 0;
    return PyFile_WriteString("\n", f);
}
/* External interface to call any callable object.
   The arg must be a tuple or NULL.  The kw must be a dict or NULL. */

PyObject *
PyEval_CallObjectWithKeywords(PyObject *func, PyObject *arg, PyObject *kw)
{
    PyObject *result;

    if (arg == NULL) {
        arg = PyTuple_New(0);
        if (arg == NULL)
            return NULL;
    }
    else if (!PyTuple_Check(arg)) {
        PyErr_SetString(PyExc_TypeError,
                        "argument list must be a tuple");
        return NULL;
    }
    else
        Py_INCREF(arg);

    if (kw != NULL && !PyDict_Check(kw)) {
        PyErr_SetString(PyExc_TypeError,
                        "keyword list must be a dictionary");
        Py_DECREF(arg);
        return NULL;
    }

    result = PyObject_Call(func, arg, kw);
    Py_DECREF(arg);
    return result;
}
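
/* Usage sketch (not part of ceval.c): calling a Python callable from C
   with positional and keyword arguments through this interface.  The
   helper name call_with_timeout and the "timeout" keyword are
   illustrative only. */
#if 0
static PyObject *
call_with_timeout(PyObject *callable, PyObject *value)
{
    PyObject *args, *kwargs, *result;

    args = Py_BuildValue("(O)", value);             /* 1-tuple of positional args */
    kwargs = Py_BuildValue("{s:i}", "timeout", 30); /* {'timeout': 30} */
    if (args == NULL || kwargs == NULL) {
        Py_XDECREF(args);
        Py_XDECREF(kwargs);
        return NULL;
    }
    result = PyEval_CallObjectWithKeywords(callable, args, kwargs);
    Py_DECREF(args);
    Py_DECREF(kwargs);
    return result;
}
#endif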
const char *
PyEval_GetFuncName(PyObject *func)
{
    if (PyMethod_Check(func))
        return PyEval_GetFuncName(PyMethod_GET_FUNCTION(func));
    else if (PyFunction_Check(func))
        return PyString_AsString(((PyFunctionObject*)func)->func_name);
    else if (PyCFunction_Check(func))
        return ((PyCFunctionObject*)func)->m_ml->ml_name;
    else if (PyClass_Check(func))
        return PyString_AsString(((PyClassObject*)func)->cl_name);
    else if (PyInstance_Check(func)) {
        return PyString_AsString(
            ((PyInstanceObject*)func)->in_class->cl_name);
    } else {
        return func->ob_type->tp_name;
    }
}

const char *
PyEval_GetFuncDesc(PyObject *func)
{
    if (PyMethod_Check(func))
        return "()";
    else if (PyFunction_Check(func))
        return "()";
    else if (PyCFunction_Check(func))
        return "()";
    else if (PyClass_Check(func))
        return " constructor";
    else if (PyInstance_Check(func)) {
        return " instance";
    } else {
        return " object";
    }
}

static void
err_args(PyObject *func, int flags, int nargs)
{
    if (flags & METH_NOARGS)
        PyErr_Format(PyExc_TypeError,
                     "%.200s() takes no arguments (%d given)",
                     ((PyCFunctionObject *)func)->m_ml->ml_name,
                     nargs);
    else
        PyErr_Format(PyExc_TypeError,
                     "%.200s() takes exactly one argument (%d given)",
                     ((PyCFunctionObject *)func)->m_ml->ml_name,
                     nargs);
}

#define C_TRACE(x, call) \
if (tstate->use_tracing && tstate->c_profilefunc) { \
    if (call_trace(tstate->c_profilefunc, \
        tstate->c_profileobj, \
        tstate->frame, PyTrace_C_CALL, \
        func)) { \
        x = NULL; \
    } \
    else { \
        x = call; \
        if (tstate->c_profilefunc != NULL) { \
            if (x == NULL) { \
                call_trace_protected(tstate->c_profilefunc, \
                    tstate->c_profileobj, \
                    tstate->frame, PyTrace_C_EXCEPTION, \
                    func); \
                /* XXX should pass (type, value, tb) */ \
            } \
            else { \
                if (call_trace(tstate->c_profilefunc, \
                    tstate->c_profileobj, \
                    tstate->frame, PyTrace_C_RETURN, \
                    func)) { \
                    Py_DECREF(x); \
                    x = NULL; \
                } \
            } \
        } \
    } \
} \
else \
    x = call;
static PyObject *
call_function(PyObject ***pp_stack, int oparg
#ifdef WITH_TSC
                , uint64* pintr0, uint64* pintr1
#endif
                )
{
    int na = oparg & 0xff;
    int nk = (oparg>>8) & 0xff;
    int n = na + 2 * nk;
    PyObject **pfunc = (*pp_stack) - n - 1;
    PyObject *func = *pfunc;
    PyObject *x, *w;

    /* Always dispatch PyCFunction first, because these are
       presumed to be the most frequent callable object.
    */
    if (PyCFunction_Check(func) && nk == 0) {
        int flags = PyCFunction_GET_FLAGS(func);
        PyThreadState *tstate = PyThreadState_GET();

        PCALL(PCALL_CFUNCTION);
        if (flags & (METH_NOARGS | METH_O)) {
            PyCFunction meth = PyCFunction_GET_FUNCTION(func);
            PyObject *self = PyCFunction_GET_SELF(func);
            if (flags & METH_NOARGS && na == 0) {
                C_TRACE(x, (*meth)(self,NULL));
            }
            else if (flags & METH_O && na == 1) {
                PyObject *arg = EXT_POP(*pp_stack);
                C_TRACE(x, (*meth)(self,arg));
                Py_DECREF(arg);
            }
            else {
                err_args(func, flags, na);
                x = NULL;
            }
        }
        else {
            PyObject *callargs;
            callargs = load_args(pp_stack, na);
            READ_TIMESTAMP(*pintr0);
            C_TRACE(x, PyCFunction_Call(func,callargs,NULL));
            READ_TIMESTAMP(*pintr1);
            Py_XDECREF(callargs);
        }
    }
    else {
        if (PyMethod_Check(func) && PyMethod_GET_SELF(func) != NULL) {
            /* optimize access to bound methods */
            PyObject *self = PyMethod_GET_SELF(func);
            PCALL(PCALL_METHOD);
            PCALL(PCALL_BOUND_METHOD);
            Py_INCREF(self);
            func = PyMethod_GET_FUNCTION(func);
            Py_INCREF(func);
            Py_DECREF(*pfunc);
            *pfunc = self;
            na++;
            n++;
        } else
            Py_INCREF(func);
        READ_TIMESTAMP(*pintr0);
        if (PyFunction_Check(func))
            x = fast_function(func, pp_stack, n, na, nk);
        else
            x = do_call(func, pp_stack, na, nk);
        READ_TIMESTAMP(*pintr1);
        Py_DECREF(func);
    }

    /* Clear the stack of the function object.  Also removes
       the arguments in case they weren't consumed already
       (fast_function() and err_args() leave them on the stack).
     */
    while ((*pp_stack) > pfunc) {
        w = EXT_POP(*pp_stack);
        Py_DECREF(w);
        PCALL(PCALL_POP);
    }
    return x;
}
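
/* Illustration (not part of ceval.c): the METH_NOARGS / METH_O fast
   path above only applies to C functions declared with those calling
   conventions.  A minimal module-method sketch; the names square and
   example_methods are made up. */
#if 0
static PyObject *
square(PyObject *self, PyObject *arg)      /* METH_O: exactly one argument */
{
    long v = PyInt_AsLong(arg);
    if (v == -1 && PyErr_Occurred())
        return NULL;
    return PyInt_FromLong(v * v);
}

static PyMethodDef example_methods[] = {
    {"square", square, METH_O, "square(x) -> x*x"},
    {NULL, NULL, 0, NULL}
};
#endif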
/* The fast_function() function optimizes calls for which no argument
   tuple is necessary; the objects are passed directly from the stack.
   For the simplest case -- a function that takes only positional
   arguments and is called with only positional arguments -- it
   inlines the most primitive frame setup code from
   PyEval_EvalCodeEx(), which vastly reduces the checks that must be
   done before evaluating the frame.
*/

static PyObject *
fast_function(PyObject *func, PyObject ***pp_stack, int n, int na, int nk)
{
    PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func);
    PyObject *globals = PyFunction_GET_GLOBALS(func);
    PyObject *argdefs = PyFunction_GET_DEFAULTS(func);
    PyObject **d = NULL;
    int nd = 0;

    PCALL(PCALL_FUNCTION);
    PCALL(PCALL_FAST_FUNCTION);
    if (argdefs == NULL && co->co_argcount == n && nk==0 &&
        co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) {
        PyFrameObject *f;
        PyObject *retval = NULL;
        PyThreadState *tstate = PyThreadState_GET();
        PyObject **fastlocals, **stack;
        int i;

        PCALL(PCALL_FASTER_FUNCTION);
        assert(globals != NULL);
        /* XXX Perhaps we should create a specialized
           PyFrame_New() that doesn't take locals, but does
           take builtins without sanity checking them.
        */
        assert(tstate != NULL);
        f = PyFrame_New(tstate, co, globals, NULL);
        if (f == NULL)
            return NULL;

        fastlocals = f->f_localsplus;
        stack = (*pp_stack) - n;

        for (i = 0; i < n; i++) {
            Py_INCREF(*stack);
            fastlocals[i] = *stack++;
        }
        retval = PyEval_EvalFrameEx(f,0);
        ++tstate->recursion_depth;
        Py_DECREF(f);
        --tstate->recursion_depth;
        return retval;
    }
    if (argdefs != NULL) {
        d = &PyTuple_GET_ITEM(argdefs, 0);
        nd = Py_SIZE(argdefs);
    }
    return PyEval_EvalCodeEx(co, globals,
                             (PyObject *)NULL, (*pp_stack)-n, na,
                             (*pp_stack)-2*nk, nk, d, nd,
                             PyFunction_GET_CLOSURE(func));
}
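
/* Illustration (not part of ceval.c): the fast path above is only taken
   for the plainest functions -- no defaults, no keyword arguments, no
   free variables, and exactly the declared number of positional
   arguments.  A hedged sketch of the same test as a standalone
   predicate; the name takes_fast_path is made up. */
#if 0
static int
takes_fast_path(PyObject *func, int n, int nk)
{
    PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func);
    return (PyFunction_GET_DEFAULTS(func) == NULL &&
            co->co_argcount == n && nk == 0 &&
            co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE));
}
#endif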
static PyObject *
update_keyword_args(PyObject *orig_kwdict, int nk, PyObject ***pp_stack,
                    PyObject *func)
{
    PyObject *kwdict = NULL;
    if (orig_kwdict == NULL)
        kwdict = PyDict_New();
    else {
        kwdict = PyDict_Copy(orig_kwdict);
        Py_DECREF(orig_kwdict);
    }
    if (kwdict == NULL)
        return NULL;
    while (--nk >= 0) {
        int err;
        PyObject *value = EXT_POP(*pp_stack);
        PyObject *key = EXT_POP(*pp_stack);
        if (PyDict_GetItem(kwdict, key) != NULL) {
            PyErr_Format(PyExc_TypeError,
                         "%.200s%s got multiple values "
                         "for keyword argument '%.200s'",
                         PyEval_GetFuncName(func),
                         PyEval_GetFuncDesc(func),
                         PyString_AsString(key));
            Py_DECREF(key);
            Py_DECREF(value);
            Py_DECREF(kwdict);
            return NULL;
        }
        err = PyDict_SetItem(kwdict, key, value);
        Py_DECREF(key);
        Py_DECREF(value);
        if (err) {
            Py_DECREF(kwdict);
            return NULL;
        }
    }
    return kwdict;
}

static PyObject *
update_star_args(int nstack, int nstar, PyObject *stararg,
                 PyObject ***pp_stack)
{
    PyObject *callargs, *w;

    callargs = PyTuple_New(nstack + nstar);
    if (callargs == NULL) {
        return NULL;
    }
    if (nstar) {
        int i;
        for (i = 0; i < nstar; i++) {
            PyObject *a = PyTuple_GET_ITEM(stararg, i);
            Py_INCREF(a);
            PyTuple_SET_ITEM(callargs, nstack + i, a);
        }
    }
    while (--nstack >= 0) {
        w = EXT_POP(*pp_stack);
        PyTuple_SET_ITEM(callargs, nstack, w);
    }
    return callargs;
}

static PyObject *
load_args(PyObject ***pp_stack, int na)
{
    PyObject *args = PyTuple_New(na);
    PyObject *w;

    if (args == NULL)
        return NULL;
    while (--na >= 0) {
        w = EXT_POP(*pp_stack);
        PyTuple_SET_ITEM(args, na, w);
    }
    return args;
}

static PyObject *
do_call(PyObject *func, PyObject ***pp_stack, int na, int nk)
{
    PyObject *callargs = NULL;
    PyObject *kwdict = NULL;
    PyObject *result = NULL;

    if (nk > 0) {
        kwdict = update_keyword_args(NULL, nk, pp_stack, func);
        if (kwdict == NULL)
            goto call_fail;
    }
    callargs = load_args(pp_stack, na);
    if (callargs == NULL)
        goto call_fail;
    /* At this point, we have to look at the type of func to
       update the call stats properly.  Do it here so as to avoid
       exposing the call stats machinery outside ceval.c
    */
    if (PyFunction_Check(func))
        PCALL(PCALL_FUNCTION);
    else if (PyMethod_Check(func))
        PCALL(PCALL_METHOD);
    else if (PyType_Check(func))
        PCALL(PCALL_TYPE);
    else if (PyCFunction_Check(func))
        PCALL(PCALL_CFUNCTION);
    else
        PCALL(PCALL_OTHER);

    if (PyCFunction_Check(func)) {
        PyThreadState *tstate = PyThreadState_GET();
        C_TRACE(result, PyCFunction_Call(func, callargs, kwdict));
    }
    else
        result = PyObject_Call(func, callargs, kwdict);
call_fail:
    Py_XDECREF(callargs);
    Py_XDECREF(kwdict);
    return result;
}

static PyObject *
ext_do_call(PyObject *func, PyObject ***pp_stack, int flags, int na, int nk)
{
    int nstar = 0;
    PyObject *callargs = NULL;
    PyObject *stararg = NULL;
    PyObject *kwdict = NULL;
    PyObject *result = NULL;

    if (flags & CALL_FLAG_KW) {
        kwdict = EXT_POP(*pp_stack);
        if (!PyDict_Check(kwdict)) {
            PyObject *d;
            d = PyDict_New();
            if (d == NULL)
                goto ext_call_fail;
            if (PyDict_Update(d, kwdict) != 0) {
                Py_DECREF(d);
                /* PyDict_Update raises attribute
                 * error (percolated from an attempt
                 * to get 'keys' attribute) instead of
                 * a type error if its second argument
                 * is not a mapping.
                 */
                if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
                    PyErr_Format(PyExc_TypeError,
                                 "%.200s%.200s argument after ** "
                                 "must be a mapping, not %.200s",
                                 PyEval_GetFuncName(func),
                                 PyEval_GetFuncDesc(func),
                                 kwdict->ob_type->tp_name);
                }
                goto ext_call_fail;
            }
            Py_DECREF(kwdict);
            kwdict = d;
        }
    }
    if (flags & CALL_FLAG_VAR) {
        stararg = EXT_POP(*pp_stack);
        if (!PyTuple_Check(stararg)) {
            PyObject *t = NULL;
            t = PySequence_Tuple(stararg);
            if (t == NULL) {
                if (PyErr_ExceptionMatches(PyExc_TypeError)) {
                    PyErr_Format(PyExc_TypeError,
                                 "%.200s%.200s argument after * "
                                 "must be a sequence, not %200s",
                                 PyEval_GetFuncName(func),
                                 PyEval_GetFuncDesc(func),
                                 stararg->ob_type->tp_name);
                }
                goto ext_call_fail;
            }
            Py_DECREF(stararg);
            stararg = t;
        }
        nstar = PyTuple_GET_SIZE(stararg);
    }
    if (nk > 0) {
        kwdict = update_keyword_args(kwdict, nk, pp_stack, func);
        if (kwdict == NULL)
            goto ext_call_fail;
    }
    callargs = update_star_args(na, nstar, stararg, pp_stack);
    if (callargs == NULL)
        goto ext_call_fail;
    /* At this point, we have to look at the type of func to
       update the call stats properly.  Do it here so as to avoid
       exposing the call stats machinery outside ceval.c
    */
    if (PyFunction_Check(func))
        PCALL(PCALL_FUNCTION);
    else if (PyMethod_Check(func))
        PCALL(PCALL_METHOD);
    else if (PyType_Check(func))
        PCALL(PCALL_TYPE);
    else if (PyCFunction_Check(func))
        PCALL(PCALL_CFUNCTION);
    else
        PCALL(PCALL_OTHER);

    if (PyCFunction_Check(func)) {
        PyThreadState *tstate = PyThreadState_GET();
        C_TRACE(result, PyCFunction_Call(func, callargs, kwdict));
    }
    else
        result = PyObject_Call(func, callargs, kwdict);
ext_call_fail:
    Py_XDECREF(callargs);
    Py_XDECREF(kwdict);
    Py_XDECREF(stararg);
    return result;
}
/* Extract a slice index from a PyInt or PyLong or an object with the
   nb_index slot defined, and store in *pi.
   Silently reduce values larger than PY_SSIZE_T_MAX to PY_SSIZE_T_MAX,
   and silently boost values less than -PY_SSIZE_T_MAX-1 to -PY_SSIZE_T_MAX-1.
   Return 0 on error, 1 on success.
*/
/* Note:  If v is NULL, return success without storing into *pi.  This
   is because _PyEval_SliceIndex() is called by apply_slice(), which can be
   called by the SLICE opcode with v and/or w equal to NULL.
*/
int
_PyEval_SliceIndex(PyObject *v, Py_ssize_t *pi)
{
    if (v != NULL) {
        Py_ssize_t x;
        if (PyInt_Check(v)) {
            /* XXX(nnorwitz): I think PyInt_AS_LONG is correct,
               however, it looks like it should be AsSsize_t.
               There should be a comment here explaining why.
            */
            x = PyInt_AS_LONG(v);
        }
        else if (PyIndex_Check(v)) {
            x = PyNumber_AsSsize_t(v, NULL);
            if (x == -1 && PyErr_Occurred())
                return 0;
        }
        else {
            PyErr_SetString(PyExc_TypeError,
                            "slice indices must be integers or "
                            "None or have an __index__ method");
            return 0;
        }
        *pi = x;
    }
    return 1;
}
#define ISINDEX(x) ((x) == NULL || \
                    PyInt_Check(x) || PyLong_Check(x) || PyIndex_Check(x))

static PyObject *
apply_slice(PyObject *u, PyObject *v, PyObject *w) /* return u[v:w] */
{
    PyTypeObject *tp = u->ob_type;
    PySequenceMethods *sq = tp->tp_as_sequence;

    if (sq && sq->sq_slice && ISINDEX(v) && ISINDEX(w)) {
        Py_ssize_t ilow = 0, ihigh = PY_SSIZE_T_MAX;
        if (!_PyEval_SliceIndex(v, &ilow))
            return NULL;
        if (!_PyEval_SliceIndex(w, &ihigh))
            return NULL;
        return PySequence_GetSlice(u, ilow, ihigh);
    }
    else {
        PyObject *slice = PySlice_New(v, w, NULL);
        if (slice != NULL) {
            PyObject *res = PyObject_GetItem(u, slice);
            Py_DECREF(slice);
            return res;
        }
        else
            return NULL;
    }
}

static int
assign_slice(PyObject *u, PyObject *v, PyObject *w, PyObject *x)
    /* u[v:w] = x */
{
    PyTypeObject *tp = u->ob_type;
    PySequenceMethods *sq = tp->tp_as_sequence;

    if (sq && sq->sq_ass_slice && ISINDEX(v) && ISINDEX(w)) {
        Py_ssize_t ilow = 0, ihigh = PY_SSIZE_T_MAX;
        if (!_PyEval_SliceIndex(v, &ilow))
            return -1;
        if (!_PyEval_SliceIndex(w, &ihigh))
            return -1;
        if (x == NULL)
            return PySequence_DelSlice(u, ilow, ihigh);
        else
            return PySequence_SetSlice(u, ilow, ihigh, x);
    }
    else {
        PyObject *slice = PySlice_New(v, w, NULL);
        if (slice != NULL) {
            int res;
            if (x != NULL)
                res = PyObject_SetItem(u, slice, x);
            else
                res = PyObject_DelItem(u, slice);
            Py_DECREF(slice);
            return res;
        }
        else
            return -1;
    }
}

#define Py3kExceptionClass_Check(x)     \
    (PyType_Check((x)) &&               \
     PyType_FastSubclass((PyTypeObject*)(x), Py_TPFLAGS_BASE_EXC_SUBCLASS))

#define CANNOT_CATCH_MSG "catching classes that don't inherit from " \
                         "BaseException is not allowed in 3.x"
static PyObject *
cmp_outcome(int op, register PyObject *v, register PyObject *w)
{
    int res = 0;
    switch (op) {
    case PyCmp_IS:
        res = (v == w);
        break;
    case PyCmp_IS_NOT:
        res = (v != w);
        break;
    case PyCmp_IN:
        res = PySequence_Contains(w, v);
        if (res < 0)
            return NULL;
        break;
    case PyCmp_NOT_IN:
        res = PySequence_Contains(w, v);
        if (res < 0)
            return NULL;
        res = !res;
        break;
    case PyCmp_EXC_MATCH:
        if (PyTuple_Check(w)) {
            Py_ssize_t i, length;
            length = PyTuple_Size(w);
            for (i = 0; i < length; i += 1) {
                PyObject *exc = PyTuple_GET_ITEM(w, i);
                if (PyString_Check(exc)) {
                    int ret_val;
                    ret_val = PyErr_WarnEx(
                        PyExc_DeprecationWarning,
                        "catching of string "
                        "exceptions is deprecated", 1);
                    if (ret_val < 0)
                        return NULL;
                }
                else if (Py_Py3kWarningFlag &&
                         !PyTuple_Check(exc) &&
                         !Py3kExceptionClass_Check(exc))
                {
                    int ret_val;
                    ret_val = PyErr_WarnEx(
                        PyExc_DeprecationWarning,
                        CANNOT_CATCH_MSG, 1);
                    if (ret_val < 0)
                        return NULL;
                }
            }
        }
        else {
            if (PyString_Check(w)) {
                int ret_val;
                ret_val = PyErr_WarnEx(
                    PyExc_DeprecationWarning,
                    "catching of string "
                    "exceptions is deprecated", 1);
                if (ret_val < 0)
                    return NULL;
            }
            else if (Py_Py3kWarningFlag &&
                     !PyTuple_Check(w) &&
                     !Py3kExceptionClass_Check(w))
            {
                int ret_val;
                ret_val = PyErr_WarnEx(
                    PyExc_DeprecationWarning,
                    CANNOT_CATCH_MSG, 1);
                if (ret_val < 0)
                    return NULL;
            }
        }
        res = PyErr_GivenExceptionMatches(v, w);
        break;
    default:
        return PyObject_RichCompare(v, w, op);
    }
    v = res ? Py_True : Py_False;
    Py_INCREF(v);
    return v;
}
static PyObject *
import_from(PyObject *v, PyObject *name)
{
    PyObject *x;

    x = PyObject_GetAttr(v, name);
    if (x == NULL && PyErr_ExceptionMatches(PyExc_AttributeError)) {
        PyErr_Format(PyExc_ImportError,
                     "cannot import name %.230s",
                     PyString_AsString(name));
    }
    return x;
}

static int
import_all_from(PyObject *locals, PyObject *v)
{
    PyObject *all = PyObject_GetAttrString(v, "__all__");
    PyObject *dict, *name, *value;
    int skip_leading_underscores = 0;
    int pos, err;

    if (all == NULL) {
        if (!PyErr_ExceptionMatches(PyExc_AttributeError))
            return -1; /* Unexpected error */
        PyErr_Clear();
        dict = PyObject_GetAttrString(v, "__dict__");
        if (dict == NULL) {
            if (!PyErr_ExceptionMatches(PyExc_AttributeError))
                return -1;
            PyErr_SetString(PyExc_ImportError,
                            "from-import-* object has no __dict__ and no __all__");
            return -1;
        }
        all = PyMapping_Keys(dict);
        Py_DECREF(dict);
        if (all == NULL)
            return -1;
        skip_leading_underscores = 1;
    }

    for (pos = 0, err = 0; ; pos++) {
        name = PySequence_GetItem(all, pos);
        if (name == NULL) {
            if (!PyErr_ExceptionMatches(PyExc_IndexError))
                err = -1;
            else
                PyErr_Clear();
            break;
        }
        if (skip_leading_underscores &&
            PyString_Check(name) &&
            PyString_AS_STRING(name)[0] == '_') {
            Py_DECREF(name);
            continue;
        }
        value = PyObject_GetAttr(v, name);
        if (value == NULL)
            err = -1;
        else if (PyDict_CheckExact(locals))
            err = PyDict_SetItem(locals, name, value);
        else
            err = PyObject_SetItem(locals, name, value);
        Py_DECREF(name);
        Py_XDECREF(value);
        if (err != 0)
            break;
    }
    Py_DECREF(all);
    return err;
}
static PyObject *
build_class(PyObject *methods, PyObject *bases, PyObject *name)
{
    PyObject *metaclass = NULL, *result, *base;

    if (PyDict_Check(methods))
        metaclass = PyDict_GetItemString(methods, "__metaclass__");
    if (metaclass != NULL)
        Py_INCREF(metaclass);
    else if (PyTuple_Check(bases) && PyTuple_GET_SIZE(bases) > 0) {
        base = PyTuple_GET_ITEM(bases, 0);
        metaclass = PyObject_GetAttrString(base, "__class__");
        if (metaclass == NULL) {
            PyErr_Clear();
            metaclass = (PyObject *)base->ob_type;
            Py_INCREF(metaclass);
        }
    }
    else {
        PyObject *g = PyEval_GetGlobals();
        if (g != NULL && PyDict_Check(g))
            metaclass = PyDict_GetItemString(g, "__metaclass__");
        if (metaclass == NULL)
            metaclass = (PyObject *) &PyClass_Type;
        Py_INCREF(metaclass);
    }
    result = PyObject_CallFunctionObjArgs(metaclass, name, bases, methods,
                                          NULL);
    Py_DECREF(metaclass);
    if (result == NULL && PyErr_ExceptionMatches(PyExc_TypeError)) {
        /* A type error here likely means that the user passed
           in a base that was not a class (such as the random module
           instead of the random.random type).  Help them out by
           augmenting the error message with more information. */
        PyObject *ptype, *pvalue, *ptraceback;

        PyErr_Fetch(&ptype, &pvalue, &ptraceback);
        if (PyString_Check(pvalue)) {
            PyObject *newmsg;
            newmsg = PyString_FromFormat(
                "Error when calling the metaclass bases\n"
                "    %s",
                PyString_AS_STRING(pvalue));
            if (newmsg != NULL) {
                Py_DECREF(pvalue);
                pvalue = newmsg;
            }
        }
        PyErr_Restore(ptype, pvalue, ptraceback);
    }
    return result;
}
static int
exec_statement(PyFrameObject *f, PyObject *prog, PyObject *globals,
               PyObject *locals)
{
    int n;
    PyObject *v;
    int plain = 0;

    if (PyTuple_Check(prog) && globals == Py_None && locals == Py_None &&
        ((n = PyTuple_Size(prog)) == 2 || n == 3)) {
        /* Backward compatibility hack */
        globals = PyTuple_GetItem(prog, 1);
        if (n == 3)
            locals = PyTuple_GetItem(prog, 2);
        prog = PyTuple_GetItem(prog, 0);
    }
    if (globals == Py_None) {
        globals = PyEval_GetGlobals();
        if (locals == Py_None) {
            locals = PyEval_GetLocals();
            plain = 1;
        }
        if (!globals || !locals) {
            PyErr_SetString(PyExc_SystemError,
                            "globals and locals cannot be NULL");
            return -1;
        }
    }
    else if (locals == Py_None)
        locals = globals;
    if (!PyString_Check(prog) &&
#ifdef Py_USING_UNICODE
        !PyUnicode_Check(prog) &&
#endif
        !PyCode_Check(prog) &&
        !PyFile_Check(prog)) {
        PyErr_SetString(PyExc_TypeError,
                        "exec: arg 1 must be a string, file, or code object");
        return -1;
    }
    if (!PyDict_Check(globals)) {
        PyErr_SetString(PyExc_TypeError,
                        "exec: arg 2 must be a dictionary or None");
        return -1;
    }
    if (!PyMapping_Check(locals)) {
        PyErr_SetString(PyExc_TypeError,
                        "exec: arg 3 must be a mapping or None");
        return -1;
    }
    if (PyDict_GetItemString(globals, "__builtins__") == NULL)
        PyDict_SetItemString(globals, "__builtins__", f->f_builtins);
    if (PyCode_Check(prog)) {
        if (PyCode_GetNumFree((PyCodeObject *)prog) > 0) {
            PyErr_SetString(PyExc_TypeError,
                            "code object passed to exec may not contain free variables");
            return -1;
        }
        v = PyEval_EvalCode((PyCodeObject *) prog, globals, locals);
    }
    else if (PyFile_Check(prog)) {
        FILE *fp = PyFile_AsFile(prog);
        char *name = PyString_AsString(PyFile_Name(prog));
        PyCompilerFlags cf;
        if (name == NULL)
            return -1;
        cf.cf_flags = 0;
        if (PyEval_MergeCompilerFlags(&cf))
            v = PyRun_FileFlags(fp, name, Py_file_input, globals,
                                locals, &cf);
        else
            v = PyRun_File(fp, name, Py_file_input, globals,
                           locals);
    }
    else {
        PyObject *tmp = NULL;
        char *str;
        PyCompilerFlags cf;
        cf.cf_flags = 0;
#ifdef Py_USING_UNICODE
        if (PyUnicode_Check(prog)) {
            tmp = PyUnicode_AsUTF8String(prog);
            if (tmp == NULL)
                return -1;
            prog = tmp;
            cf.cf_flags |= PyCF_SOURCE_IS_UTF8;
        }
#endif
        if (PyString_AsStringAndSize(prog, &str, NULL))
            return -1;
        if (PyEval_MergeCompilerFlags(&cf))
            v = PyRun_StringFlags(str, Py_file_input, globals,
                                  locals, &cf);
        else
            v = PyRun_String(str, Py_file_input, globals, locals);
        Py_XDECREF(tmp);
    }
    if (plain)
        PyFrame_LocalsToFast(f, 0);
    if (v == NULL)
        return -1;
    Py_DECREF(v);
    return 0;
}
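
/* Usage sketch (not part of ceval.c): the string case above ultimately
   funnels into PyRun_String()/PyRun_StringFlags(), which extension code
   can call directly.  The helper name exec_in_fresh_namespace and the
   namespace ns are illustrative only. */
#if 0
static int
exec_in_fresh_namespace(const char *source)
{
    PyObject *ns, *v;

    ns = PyDict_New();
    if (ns == NULL)
        return -1;
    PyDict_SetItemString(ns, "__builtins__", PyEval_GetBuiltins());
    v = PyRun_String(source, Py_file_input, ns, ns);   /* roughly: exec source in ns */
    Py_DECREF(ns);
    if (v == NULL)
        return -1;
    Py_DECREF(v);
    return 0;
}
#endif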
static void
format_exc_check_arg(PyObject *exc, char *format_str, PyObject *obj)
{
    char *obj_str;

    if (!obj)
        return;

    obj_str = PyString_AsString(obj);
    if (!obj_str)
        return;

    PyErr_Format(exc, format_str, obj_str);
}

static PyObject *
string_concatenate(PyObject *v, PyObject *w,
                   PyFrameObject *f, unsigned char *next_instr)
{
    /* This function implements 'variable += expr' when both arguments
       are strings. */
    Py_ssize_t v_len = PyString_GET_SIZE(v);
    Py_ssize_t w_len = PyString_GET_SIZE(w);
    Py_ssize_t new_len = v_len + w_len;
    if (new_len < 0) {
        PyErr_SetString(PyExc_OverflowError,
                        "strings are too large to concat");
        return NULL;
    }

    if (v->ob_refcnt == 2) {
        /* In the common case, there are 2 references to the value
         * stored in 'variable' when the += is performed: one on the
         * value stack (in 'v') and one still stored in the
         * 'variable'.  We try to delete the variable now to reduce
         * the refcnt to 1.
         */
        switch (*next_instr) {
        case STORE_FAST:
        {
            int oparg = PEEKARG();
            PyObject **fastlocals = f->f_localsplus;
            if (GETLOCAL(oparg) == v)
                SETLOCAL(oparg, NULL);
            break;
        }
        case STORE_DEREF:
        {
            PyObject **freevars = (f->f_localsplus +
                                   f->f_code->co_nlocals);
            PyObject *c = freevars[PEEKARG()];
            if (PyCell_GET(c) == v)
                PyCell_Set(c, NULL);
            break;
        }
        case STORE_NAME:
        {
            PyObject *names = f->f_code->co_names;
            PyObject *name = GETITEM(names, PEEKARG());
            PyObject *locals = f->f_locals;
            if (PyDict_CheckExact(locals) &&
                PyDict_GetItem(locals, name) == v) {
                if (PyDict_DelItem(locals, name) != 0) {
                    PyErr_Clear();
                }
            }
            break;
        }
        }
    }

    if (v->ob_refcnt == 1 && !PyString_CHECK_INTERNED(v)) {
        /* Now we own the last reference to 'v', so we can resize it
         * in-place.
         */
        if (_PyString_Resize(&v, new_len) != 0) {
            /* XXX if _PyString_Resize() fails, 'v' has been
             * deallocated so it cannot be put back into
             * 'variable'.  The MemoryError is raised when there
             * is no value in 'variable', which might (very
             * remotely) be a cause of incompatibilities.
             */
            return NULL;
        }
        /* copy 'w' into the newly allocated area of 'v' */
        memcpy(PyString_AS_STRING(v) + v_len,
               PyString_AS_STRING(w), w_len);
        return v;
    }
    else {
        /* When in-place resizing is not an option. */
        PyString_Concat(&v, w);
        return v;
    }
}
#ifdef DYNAMIC_EXECUTION_PROFILE

static PyObject *
getarray(long a[256])
{
    int i;
    PyObject *l = PyList_New(256);
    if (l == NULL) return NULL;
    for (i = 0; i < 256; i++) {
        PyObject *x = PyInt_FromLong(a[i]);
        if (x == NULL) {
            Py_DECREF(l);
            return NULL;
        }
        PyList_SetItem(l, i, x);
    }
    for (i = 0; i < 256; i++)
        a[i] = 0;
    return l;
}

PyObject *
_Py_GetDXProfile(PyObject *self, PyObject *args)
{
#ifndef DXPAIRS
    return getarray(dxp);
#else
    int i;
    PyObject *l = PyList_New(257);
    if (l == NULL) return NULL;
    for (i = 0; i < 257; i++) {
        PyObject *x = getarray(dxpairs[i]);
        if (x == NULL) {
            Py_DECREF(l);
            return NULL;
        }
        PyList_SetItem(l, i, x);
    }
    return l;
#endif
}

#endif