Bug Summary

File: Python/ceval.c
Location: line 3045, column 17
Description: Value stored to 'why' is never read

Annotated Source Code

1
2/* Execute compiled code */
3
4/* XXX TO DO:
5 XXX speed up searching for keywords by using a dictionary
6 XXX document it!
7 */
8
9/* enable more aggressive intra-module optimizations, where available */
10#define PY_LOCAL_AGGRESSIVE
11
12#include "Python.h"
13
14#include "code.h"
15#include "frameobject.h"
16#include "opcode.h"
17#include "structmember.h"
18
19#include <ctype.h>
20
21#ifndef WITH_TSC
22
23#define READ_TIMESTAMP(var)
24
25#else
26
27typedef unsigned long long uint64;
28
29/* PowerPC support.
30 "__ppc__" appears to be the preprocessor definition to detect on OS X, whereas
31 "__powerpc__" appears to be the correct one for Linux with GCC
32*/
33#if defined(__ppc__) || defined (__powerpc__)
34
35#define READ_TIMESTAMP(var) ppc_getcounter(&var)
36
37static void
38ppc_getcounter(uint64 *v)
39{
40 register unsigned long tbu, tb, tbu2;
41
42 loop:
43 asm volatile ("mftbu %0" : "=r" (tbu) );
44 asm volatile ("mftb %0" : "=r" (tb) );
45 asm volatile ("mftbu %0" : "=r" (tbu2));
46 if (__builtin_expect(tbu != tbu2, 0)) goto loop;
47
48 /* The slightly peculiar way of writing the next lines is
49 compiled better by GCC than any other way I tried. */
50 ((long*)(v))[0] = tbu;
51 ((long*)(v))[1] = tb;
52}
53
54#elif defined(__i386__)
55
56/* this is for linux/x86 (and probably any other GCC/x86 combo) */
57
58#define READ_TIMESTAMP(val) \
59 __asm__ __volatile__("rdtsc" : "=A" (val))
60
61#elif defined(__x86_64__)
62
63/* for gcc/x86_64, the "A" constraint in DI mode means *either* rax *or* rdx;
64 not edx:eax as it does for i386. Since rdtsc puts its result in edx:eax
65 even in 64-bit mode, we need to use "a" and "d" for the lower and upper
66 32-bit pieces of the result. */
67
68#define READ_TIMESTAMP(val) \
69 __asm__ __volatile__("rdtsc" : \
70 "=a" (((int*)&(val))[0]), "=d" (((int*)&(val))[1]));
71
72
73#else
74
75#error "Don't know how to implement timestamp counter for this architecture"
76
77#endif
78
79void dump_tsc(int opcode, int ticked, uint64 inst0, uint64 inst1,
80 uint64 loop0, uint64 loop1, uint64 intr0, uint64 intr1)
81{
82 uint64 intr, inst, loop;
83 PyThreadState *tstate = PyThreadState_Get();
84 if (!tstate->interp->tscdump)
85 return;
86 intr = intr1 - intr0;
87 inst = inst1 - inst0 - intr;
88 loop = loop1 - loop0 - intr;
89 fprintf(stderr, "opcode=%03d t=%d inst=%06lld loop=%06lld\n",
90 opcode, ticked, inst, loop);
91}
92
93#endif
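
/* Illustrative sketch (not part of ceval.c): how a caller might bracket a
   region with READ_TIMESTAMP and report it through dump_tsc(). The function
   name, variable names, and the zeroed interrupt counters are assumptions
   for the example; the real measurement sites live in the eval loop further
   down this file. */
#ifdef WITH_TSC
static void
timing_example(int opcode, int ticked)
{
    uint64 inst0, inst1, loop0, loop1;
    uint64 intr0 = 0, intr1 = 0;   /* no interrupt window measured here */

    READ_TIMESTAMP(loop0);         /* start of the simulated dispatch loop */
    READ_TIMESTAMP(inst0);         /* start of the simulated instruction */
    /* ... work being timed would run here ... */
    READ_TIMESTAMP(inst1);         /* end of the simulated instruction */
    READ_TIMESTAMP(loop1);         /* end of the simulated dispatch loop */

    dump_tsc(opcode, ticked, inst0, inst1, loop0, loop1, intr0, intr1);
}
#endif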
94
95/* Turn this on if your compiler chokes on the big switch: */
96/* #define CASE_TOO_BIG 1 */
97
98#ifdef Py_DEBUG
99/* For debugging the interpreter: */
100#define LLTRACE 1 /* Low-level trace feature */
101#define CHECKEXC 1 /* Double-check exception checking */
102#endif
103
104typedef PyObject *(*callproc)(PyObject *, PyObject *, PyObject *);
105
106/* Forward declarations */
107#ifdef WITH_TSC
108static PyObject * call_function(PyObject ***, int, uint64*, uint64*);
109#else
110static PyObject * call_function(PyObject ***, int);
111#endif
112static PyObject * fast_function(PyObject *, PyObject ***, int, int, int);
113static PyObject * do_call(PyObject *, PyObject ***, int, int);
114static PyObject * ext_do_call(PyObject *, PyObject ***, int, int, int);
115static PyObject * update_keyword_args(PyObject *, int, PyObject ***,
116 PyObject *);
117static PyObject * update_star_args(int, int, PyObject *, PyObject ***);
118static PyObject * load_args(PyObject ***, int);
119#define CALL_FLAG_VAR 1
120#define CALL_FLAG_KW 2
121
122#ifdef LLTRACE
123static int lltrace;
124static int prtrace(PyObject *, char *);
125#endif
126static int call_trace(Py_tracefunc, PyObject *, PyFrameObject *,
127 int, PyObject *);
128static int call_trace_protected(Py_tracefunc, PyObject *,
129 PyFrameObject *, int, PyObject *);
130static void call_exc_trace(Py_tracefunc, PyObject *, PyFrameObject *);
131static int maybe_call_line_trace(Py_tracefunc, PyObject *,
132 PyFrameObject *, int *, int *, int *);
133
134static PyObject * cmp_outcome(int, PyObject *, PyObject *);
135static PyObject * import_from(PyObject *, PyObject *);
136static int import_all_from(PyObject *, PyObject *);
137static void format_exc_check_arg(PyObject *, const char *, PyObject *);
138static void format_exc_unbound(PyCodeObject *co, int oparg);
139static PyObject * unicode_concatenate(PyObject *, PyObject *,
140 PyFrameObject *, unsigned char *);
141static PyObject * special_lookup(PyObject *, char *, PyObject **);
142
143#define NAME_ERROR_MSG \
144 "name '%.200s' is not defined"
145#define GLOBAL_NAME_ERROR_MSG \
146 "global name '%.200s' is not defined"
147#define UNBOUNDLOCAL_ERROR_MSG \
148 "local variable '%.200s' referenced before assignment"
149#define UNBOUNDFREE_ERROR_MSG \
150 "free variable '%.200s' referenced before assignment" \
151 " in enclosing scope"
152
153/* Dynamic execution profile */
154#ifdef DYNAMIC_EXECUTION_PROFILE
155#ifdef DXPAIRS
156static long dxpairs[257][256];
157#define dxp dxpairs[256]
158#else
159static long dxp[256];
160#endif
161#endif
162
163/* Function call profile */
164#ifdef CALL_PROFILE
165#define PCALL_NUM 11
166static int pcall[PCALL_NUM];
167
168#define PCALL_ALL 0
169#define PCALL_FUNCTION 1
170#define PCALL_FAST_FUNCTION 2
171#define PCALL_FASTER_FUNCTION 3
172#define PCALL_METHOD 4
173#define PCALL_BOUND_METHOD 5
174#define PCALL_CFUNCTION 6
175#define PCALL_TYPE 7
176#define PCALL_GENERATOR 8
177#define PCALL_OTHER 9
178#define PCALL_POP 10
179
180/* Notes about the statistics
181
182 PCALL_FAST stats
183
184 FAST_FUNCTION means no argument tuple needs to be created.
185 FASTER_FUNCTION means that the fast-path frame setup code is used.
186
187 If there is a method call where the call can be optimized by changing
188 the argument tuple and calling the function directly, it gets recorded
189 twice.
190
191 As a result, the relationship among the statistics appears to be
192 PCALL_ALL == PCALL_FUNCTION + PCALL_METHOD - PCALL_BOUND_METHOD +
193 PCALL_CFUNCTION + PCALL_TYPE + PCALL_GENERATOR + PCALL_OTHER
194 PCALL_FUNCTION > PCALL_FAST_FUNCTION > PCALL_FASTER_FUNCTION
195 PCALL_METHOD > PCALL_BOUND_METHOD
196*/
197
198#define PCALL(POS) pcall[POS]++
199
200PyObject *
201PyEval_GetCallStats(PyObject *self)
202{
203 return Py_BuildValue("iiiiiiiiiii",
204 pcall[0], pcall[1], pcall[2], pcall[3],
205 pcall[4], pcall[5], pcall[6], pcall[7],
206 pcall[8], pcall[9], pcall[10]);
207}
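
/* Illustrative sketch (not part of ceval.c): checking the documented
   relationship among the counters from the notes above. The helper name is
   hypothetical; it reads the same static pcall[] array that
   PyEval_GetCallStats() reports. */
static int
pcall_identity_holds(void)
{
    long expected = pcall[PCALL_FUNCTION] + pcall[PCALL_METHOD]
                    - pcall[PCALL_BOUND_METHOD] + pcall[PCALL_CFUNCTION]
                    + pcall[PCALL_TYPE] + pcall[PCALL_GENERATOR]
                    + pcall[PCALL_OTHER];
    /* The notes say the relationship "appears to be" this: treat the check
       as a sanity test, not an invariant guaranteed by the interpreter. */
    return pcall[PCALL_ALL] == expected;
}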
208#else
209#define PCALL(O)
210
211PyObject *
212PyEval_GetCallStats(PyObject *self)
213{
214 Py_INCREF(Py_None);
215 return Py_None;
216}
217#endif
218
219
220#ifdef WITH_THREAD
221#define GIL_REQUEST _Py_atomic_load_relaxed(&gil_drop_request)
222#else
223#define GIL_REQUEST 0
224#endif
225
226/* This can set eval_breaker to 0 even though gil_drop_request became
227 1. We believe this is all right because the eval loop will release
228 the GIL eventually anyway. */
229#define COMPUTE_EVAL_BREAKER() \
230 _Py_atomic_store_relaxed( \
231 &eval_breaker, \
232 GIL_REQUEST | \
233 _Py_atomic_load_relaxed(&pendingcalls_to_do) | \
234 pending_async_exc)
235
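
/* Illustrative sketch (not part of ceval.c): the consolidation that
   COMPUTE_EVAL_BREAKER() performs, shown with plain ints instead of
   _Py_atomic_int. The function and parameter names are hypothetical.
   eval_breaker is the single flag the eval loop's fast path tests; it is
   the OR of every individual "please leave the fast path" request. */
static void
compute_eval_breaker_example(int gil_drop, int pending_calls, int async_exc,
                             int *breaker)
{
    /* One test in the hot loop instead of three. */
    *breaker = gil_drop | pending_calls | async_exc;
}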
236#ifdef WITH_THREAD
237
238#define SET_GIL_DROP_REQUEST() \
239 do { \
240 _Py_atomic_store_relaxed(&gil_drop_request, 1); \
241 _Py_atomic_store_relaxed(&eval_breaker, 1); \
242 } while (0)
243
244#define RESET_GIL_DROP_REQUEST() \
245 do { \
246 _Py_atomic_store_relaxed(&gil_drop_request, 0); \
247 COMPUTE_EVAL_BREAKER(); \
248 } while (0)
249
250#endif
251
252/* Pending calls are only modified under pending_lock */
253#define SIGNAL_PENDING_CALLS() \
254 do { \
255 _Py_atomic_store_relaxed(&pendingcalls_to_do, 1); \
256 _Py_atomic_store_relaxed(&eval_breaker, 1); \
257 } while (0)
258
259#define UNSIGNAL_PENDING_CALLS() \
260 do { \
261 _Py_atomic_store_relaxed(&pendingcalls_to_do, 0); \
262 COMPUTE_EVAL_BREAKER(); \
263 } while (0)
264
265#define SIGNAL_ASYNC_EXC() \
266 do { \
267 pending_async_exc = 1; \
268 _Py_atomic_store_relaxed(&eval_breaker, 1); \
269 } while (0)
270
271#define UNSIGNAL_ASYNC_EXC() \
272 do { pending_async_exc = 0; COMPUTE_EVAL_BREAKER(); } while (0)
273
274
275#ifdef WITH_THREAD
276
277#ifdef HAVE_ERRNO_H
278#include <errno.h>
279#endif
280#include "pythread.h"
281
282static PyThread_type_lock pending_lock = 0; /* for pending calls */
283static long main_thread = 0;
284/* This single variable consolidates all requests to break out of the fast path
285 in the eval loop. */
286static _Py_atomic_int eval_breaker = {0};
287/* Request for dropping the GIL */
288static _Py_atomic_int gil_drop_request = {0};
289/* Request for running pending calls. */
290static _Py_atomic_int pendingcalls_to_do = {0};
291/* Request for looking at the `async_exc` field of the current thread state.
292 Guarded by the GIL. */
293static int pending_async_exc = 0;
294
295#include "ceval_gil.h"
296
297int
298PyEval_ThreadsInitialized(void)
299{
300 return gil_created();
301}
302
303void
304PyEval_InitThreads(void)
305{
306 if (gil_created())
307 return;
308 create_gil();
309 take_gil(PyThreadState_GET());
310 main_thread = PyThread_get_thread_ident();
311 if (!pending_lock)
312 pending_lock = PyThread_allocate_lock();
313}
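
/* Illustrative sketch (not part of ceval.c): the usual embedding sequence
   that reaches PyEval_InitThreads(). Py_Initialize(), PyEval_InitThreads(),
   and Py_Finalize() are the public C API; the wrapper function is
   hypothetical. */
static void
embedding_init_example(void)
{
    Py_Initialize();        /* set up the interpreter */
    PyEval_InitThreads();   /* create the GIL; the calling thread holds it */
    /* ... run Python code, possibly from several C threads ... */
    Py_Finalize();
}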
314
315void
316_PyEval_FiniThreads(void)
317{
318 if (!gil_created())
319 return;
320 destroy_gil();
321 assert(!gil_created());
322}
323
324void
325PyEval_AcquireLock(void)
326{
327 PyThreadState *tstate = PyThreadState_GET();
328 if (tstate == NULL)
329 Py_FatalError("PyEval_AcquireLock: current thread state is NULL");
330 take_gil(tstate);
331}
332
333void
334PyEval_ReleaseLock(void)
335{
336 /* This function must succeed when the current thread state is NULL.
337 We therefore avoid PyThreadState_GET() which dumps a fatal error
338 in debug mode.
339 */
340 drop_gil((PyThreadState*)_Py_atomic_load_relaxed(
341 &_PyThreadState_Current));
342}
343
344void
345PyEval_AcquireThread(PyThreadState *tstate)
346{
347 if (tstate == NULL)
348 Py_FatalError("PyEval_AcquireThread: NULL new thread state");
349 /* Check someone has called PyEval_InitThreads() to create the lock */
350 assert(gil_created());
351 take_gil(tstate);
352 if (PyThreadState_Swap(tstate) != NULL)
353 Py_FatalError(
354 "PyEval_AcquireThread: non-NULL old thread state");
355}
356
357void
358PyEval_ReleaseThread(PyThreadState *tstate)
359{
360 if (tstate == NULL)
361 Py_FatalError("PyEval_ReleaseThread: NULL thread state");
362 if (PyThreadState_Swap(NULL) != tstate)
363 Py_FatalError("PyEval_ReleaseThread: wrong thread state");
364 drop_gil(tstate);
365}
366
367/* This function is called from PyOS_AfterFork to ensure that newly
368 created child processes don't hold locks referring to threads which
369 are not running in the child process. (This could also be done using
370 pthread_atfork mechanism, at least for the pthreads implementation.) */
371
372void
373PyEval_ReInitThreads(void)
374{
375 PyObject *threading, *result;
376 PyThreadState *tstate = PyThreadState_GET();
377
378 if (!gil_created())
379 return;
380 recreate_gil();
381 pending_lock = PyThread_allocate_lock();
382 take_gil(tstate);
383 main_thread = PyThread_get_thread_ident();
384
385 /* Update the threading module with the new state.
386 */
387 tstate = PyThreadState_GET();
388 threading = PyMapping_GetItemString(tstate->interp->modules,
389 "threading");
390 if (threading == NULL) {
391 /* threading not imported */
392 PyErr_Clear();
393 return;
394 }
395 result = PyObject_CallMethod(threading, "_after_fork", NULL);
396 if (result == NULL)
397 PyErr_WriteUnraisable(threading);
398 else
399 Py_DECREF(result);
400 Py_DECREF(threading);
401}
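
/* Illustrative sketch (not part of ceval.c): how an embedding application
   picks up this re-initialization after fork(). PyOS_AfterFork() is the
   public entry point that ends up calling PyEval_ReInitThreads(); the
   wrapper function is an assumption for the example. */
#include <unistd.h>

static pid_t
fork_with_python_example(void)
{
    pid_t pid = fork();
    if (pid == 0) {
        /* Child: rebuild the GIL and pending_lock so locks held by threads
           that do not exist in the child cannot deadlock us. */
        PyOS_AfterFork();
    }
    return pid;
}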
402
403#else
404static _Py_atomic_int eval_breaker = {0};
405static int pending_async_exc = 0;
406#endif /* WITH_THREAD */
407
408/* This function is used to signal that async exceptions are waiting to be
409 raised, therefore it is also useful in non-threaded builds. */
410
411void
412_PyEval_SignalAsyncExc(void)
413{
414 SIGNAL_ASYNC_EXC();
415}
416
417/* Functions save_thread and restore_thread are always defined so
418 dynamically loaded modules needn't be compiled separately for use
419 with and without threads: */
420
421PyThreadState *
422PyEval_SaveThread(void)
423{
424 PyThreadState *tstate = PyThreadState_Swap(NULL);
425 if (tstate == NULL)
426 Py_FatalError("PyEval_SaveThread: NULL tstate");
427#ifdef WITH_THREAD
428 if (gil_created())
429 drop_gil(tstate);
430#endif
431 return tstate;
432}
433
434void
435PyEval_RestoreThread(PyThreadState *tstate)
436{
437 if (tstate == NULL)
438 Py_FatalError("PyEval_RestoreThread: NULL tstate");
439#ifdef WITH_THREAD
440 if (gil_created()) {
441 int err = errno;
442 take_gil(tstate);
443 errno = err;
444 }
445#endif
446 PyThreadState_Swap(tstate);
447}
448
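
/* Illustrative sketch (not part of ceval.c): extension code usually reaches
   the save/restore pair above through the public macros, which expand to
   exactly these calls. The blocking_io_example() wrapper and its read()
   arguments are assumptions for the example. */
#include <unistd.h>

static ssize_t
blocking_io_example(int fd, void *buf, size_t n)
{
    ssize_t r;
    Py_BEGIN_ALLOW_THREADS   /* { PyThreadState *_save = PyEval_SaveThread(); */
    r = read(fd, buf, n);    /* GIL released while we block */
    Py_END_ALLOW_THREADS     /* PyEval_RestoreThread(_save); } */
    return r;
}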
449
450/* Mechanism whereby asynchronously executing callbacks (e.g. UNIX
451 signal handlers or Mac I/O completion routines) can schedule calls
452 to a function to be called synchronously.
453 The synchronous function is called with one void* argument.
454 It should return 0 for success or -1 for failure -- failure should
455 be accompanied by an exception.
456
457 If registration succeeds, the registration function returns 0; if it fails
458 (e.g. due to too many pending calls) it returns -1 (without setting
459 an exception condition).
460
461 Note that because registration may occur from within signal handlers,
462 or other asynchronous events, calling malloc() is unsafe!
463
464#ifdef WITH_THREAD
465 Any thread can schedule pending calls, but only the main thread
466 will execute them.
467 There is no facility to schedule calls to a particular thread, but
468 that should be easy to change, should that ever be required. In
469 that case, the static variables here should go into the python
470 threadstate.
471#endif
472*/
473
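/* Illustrative sketch (not part of ceval.c): scheduling a pending call as
   the comment above describes. Both the handler and the callback are
   hypothetical; Py_AddPendingCall() itself avoids malloc(), which is why
   it is safe to use from a signal handler. */
static int
report_wakeup(void *arg)
{
    /* Runs later, synchronously, in the main thread with the GIL held.
       Return 0 for success; -1 (with an exception set) for failure. */
    (void)arg;
    return 0;
}

static void
example_signal_handler(int signum)
{
    (void)signum;
    /* May fail with -1 if the bounded queue of pending calls is full. */
    (void)Py_AddPendingCall(report_wakeup, NULL);
}
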
474#ifdef WITH_THREAD
475
476/* The WITH_THREAD implementation is thread-safe. It allows
477 scheduling to be made from any thread, and even from an executing
478 callback.
479 */
480
481#define NPENDINGCALLS 32
482static struct {
483 int (*func)(void *);
484 void *arg;
485} pendingcalls[NPENDINGCALLS];
486static int pendingfirst = 0;
487static int pendinglast = 0;
488static char pendingbusy = 0;
489
490int
491Py_AddPendingCall(int (*func)(void *), void *arg)
492{
493 int i, j, result=0;
494 PyThread_type_lock lock = pending_lock;
495
496 /* try a few times for the lock. Since this mechanism is used
497 * for signal handling (on the main thread), there is a (slim)
498 * chance that a signal is delivered on the same thread while we
499 * hold the lock during the Py_MakePendingCalls() function.
500 * This avoids a deadlock in that case.
501 * Note that signals can be delivered on any thread. In particular,
502 * on Windows, a SIGINT is delivered on a system-created worker
503 * thread.
504 * We also check for lock being NULL, in the unlikely case that
505 * this function is called before any bytecode evaluation takes place.
506 */
507 if (lock != NULL) {
508 for (i = 0; i<100; i++) {
509 if (PyThread_acquire_lock(lock, NOWAIT_LOCK))
510 break;
511 }
512 if (i == 100)
513 return -1;
514 }
515
516 i = pendinglast;
517 j = (i + 1) % NPENDINGCALLS;
518 if (j == pendingfirst) {
519 result = -1; /* Queue full */
520 } else {
521 pendingcalls[i].func = func;
522 pendingcalls[i].arg = arg;
523 pendinglast = j;
524 }
525 /* signal main loop */
526 SIGNAL_PENDING_CALLS();
527 if (lock != NULL)
528 PyThread_release_lock(lock);
529 return result;
530}
531
532int
533Py_MakePendingCalls(void)
534{
535 int i;
536 int r = 0;
537
538 if (!pending_lock) {
539 /* initial allocation of the lock */
540 pending_lock = PyThread_allocate_lock();
541 if (pending_lock == NULL)
542 return -1;
543 }
544
545 /* only service pending calls on main thread */
546 if (main_thread && PyThread_get_thread_ident() != main_thread)
547 return 0;
548 /* don't perform recursive pending calls */
549 if (pendingbusy)
550 return 0;
551 pendingbusy = 1;
552 /* perform a bounded number of calls, in case of recursion */
553 for (i=0; i<NPENDINGCALLS32; i++) {
554 int j;
555 int (*func)(void *);
556 void *arg = NULL;
557
558 /* pop one item off the queue while holding the lock */
559 PyThread_acquire_lock(pending_lock, WAIT_LOCK);
560 j = pendingfirst;
561 if (j == pendinglast) {
562 func = NULL; /* Queue empty */
563 } else {
564 func = pendingcalls[j].func;
565 arg = pendingcalls[j].arg;
566 pendingfirst = (j + 1) % NPENDINGCALLS;
567 }
568 if (pendingfirst != pendinglast)
569 SIGNAL_PENDING_CALLS();
570 else
571 UNSIGNAL_PENDING_CALLS();
572 PyThread_release_lock(pending_lock);
573 /* having released the lock, perform the callback */
574 if (func == NULL)
575 break;
576 r = func(arg);
577 if (r)
578 break;
579 }
580 pendingbusy = 0;
581 return r;
582}
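/* Standalone sketch (not from ceval.c) of the ring-buffer discipline both
   functions above rely on: the queue is empty when first == last and full
   when (last + 1) % N == first, so one slot is always left unused to tell
   the two states apart. */
#if 0
#define N 32
static int buf[N], first = 0, last = 0;

static int ring_push(int v)
{
    if ((last + 1) % N == first)
        return -1;              /* full */
    buf[last] = v;
    last = (last + 1) % N;
    return 0;
}

static int ring_pop(int *v)
{
    if (first == last)
        return -1;              /* empty */
    *v = buf[first];
    first = (first + 1) % N;
    return 0;
}
#endif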
583
584#else /* if ! defined WITH_THREAD */
585
586/*
587 WARNING! ASYNCHRONOUSLY EXECUTING CODE!
588 This code is used for signal handling in python that isn't built
589 with WITH_THREAD.
590 Don't use this implementation when Py_AddPendingCall() can happen
591 on a different thread!
592
593 There are two possible race conditions:
594 (1) nested asynchronous calls to Py_AddPendingCall()
595 (2) Py_AddPendingCall() calls made while pending calls are being processed.
596
597 (1) is very unlikely because typically signal delivery
598 is blocked during signal handling. So it should be impossible.
599 (2) is a real possibility.
600 The current code is safe against (2), but not against (1).
601 The safety against (2) is derived from the fact that only one
602 thread is present, interrupted by signals, and that the critical
603 section is protected with the "busy" variable. On Windows, which
604 delivers SIGINT on a system thread, this does not hold and therefore
605 Windows really shouldn't use this version.
606 The two threads could theoretically wiggle around the "busy" variable.
607*/
608
609#define NPENDINGCALLS 32
610static struct {
611 int (*func)(void *);
612 void *arg;
613} pendingcalls[NPENDINGCALLS];
614static volatile int pendingfirst = 0;
615static volatile int pendinglast = 0;
616static _Py_atomic_int pendingcalls_to_do = {0};
617
618int
619Py_AddPendingCall(int (*func)(void *), void *arg)
620{
621 static volatile int busy = 0;
622 int i, j;
623 /* XXX Begin critical section */
624 if (busy)
625 return -1;
626 busy = 1;
627 i = pendinglast;
628 j = (i + 1) % NPENDINGCALLS;
629 if (j == pendingfirst) {
630 busy = 0;
631 return -1; /* Queue full */
632 }
633 pendingcalls[i].func = func;
634 pendingcalls[i].arg = arg;
635 pendinglast = j;
636
637 SIGNAL_PENDING_CALLS();
638 busy = 0;
639 /* XXX End critical section */
640 return 0;
641}
642
643int
644Py_MakePendingCalls(void)
645{
646 static int busy = 0;
647 if (busy)
648 return 0;
649 busy = 1;
650 UNSIGNAL_PENDING_CALLS();
651 for (;;) {
652 int i;
653 int (*func)(void *);
654 void *arg;
655 i = pendingfirst;
656 if (i == pendinglast)
657 break; /* Queue empty */
658 func = pendingcalls[i].func;
659 arg = pendingcalls[i].arg;
660 pendingfirst = (i + 1) % NPENDINGCALLS;
661 if (func(arg) < 0) {
662 busy = 0;
663 SIGNAL_PENDING_CALLS(); /* We're not done yet */
664 return -1;
665 }
666 }
667 busy = 0;
668 return 0;
669}
670
671#endif /* WITH_THREAD */
672
673
674/* The interpreter's recursion limit */
675
676#ifndef Py_DEFAULT_RECURSION_LIMIT
677#define Py_DEFAULT_RECURSION_LIMIT 1000
678#endif
679static int recursion_limit = Py_DEFAULT_RECURSION_LIMIT;
680int _Py_CheckRecursionLimit = Py_DEFAULT_RECURSION_LIMIT;
681
682int
683Py_GetRecursionLimit(void)
684{
685 return recursion_limit;
686}
687
688void
689Py_SetRecursionLimit(int new_limit)
690{
691 recursion_limit = new_limit;
692 _Py_CheckRecursionLimit = recursion_limit;
693}
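/* Usage sketch (not part of ceval.c): embedding code can widen the limit
   around a known-deep call and restore it afterwards, which is what
   sys.setrecursionlimit() does for Python code. run_deep is hypothetical. */
#if 0
static void
run_deep(void)
{
    int old_limit = Py_GetRecursionLimit();
    Py_SetRecursionLimit(old_limit * 2);
    /* ... run the deeply recursive Python code ... */
    Py_SetRecursionLimit(old_limit);
}
#endif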
694
695/* the macro Py_EnterRecursiveCall() only calls _Py_CheckRecursiveCall()
696 if the recursion_depth reaches _Py_CheckRecursionLimit.
697 If USE_STACKCHECK, the macro decrements _Py_CheckRecursionLimit
698 to guarantee that _Py_CheckRecursiveCall() is regularly called.
699 Without USE_STACKCHECK, there is no need for this. */
700int
701_Py_CheckRecursiveCall(char *where)
702{
703 PyThreadState *tstate = PyThreadState_GET();
704
705#ifdef USE_STACKCHECK
706 if (PyOS_CheckStack()) {
707 --tstate->recursion_depth;
708 PyErr_SetString(PyExc_MemoryError, "Stack overflow");
709 return -1;
710 }
711#endif
712 _Py_CheckRecursionLimit = recursion_limit;
713 if (tstate->recursion_critical)
714 /* Somebody asked that we don't check for recursion. */
715 return 0;
716 if (tstate->overflowed) {
717 if (tstate->recursion_depth > recursion_limit + 50) {
718 /* Overflowing while handling an overflow. Give up. */
719 Py_FatalError("Cannot recover from stack overflow.");
720 }
721 return 0;
722 }
723 if (tstate->recursion_depth > recursion_limit) {
724 --tstate->recursion_depth;
725 tstate->overflowed = 1;
726 PyErr_Format(PyExc_RuntimeError,
727 "maximum recursion depth exceeded%s",
728 where);
729 return -1;
730 }
731 return 0;
732}
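/* Usage sketch (not part of ceval.c): C code that recurses on behalf of
   Python objects should bracket itself with Py_EnterRecursiveCall() and
   Py_LeaveRecursiveCall() so that C-level recursion is counted against the
   same depth limit checked above. deep_repr is a hypothetical helper. */
#if 0
static PyObject *
deep_repr(PyObject *obj)
{
    PyObject *result;
    if (Py_EnterRecursiveCall(" while getting a repr"))
        return NULL;            /* RuntimeError already set */
    result = PyObject_Repr(obj);
    Py_LeaveRecursiveCall();
    return result;
}
#endif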
733
734/* Status code for main loop (reason for stack unwind) */
735enum why_code {
736 WHY_NOT = 0x0001, /* No error */
737 WHY_EXCEPTION = 0x0002, /* Exception occurred */
738 WHY_RERAISE = 0x0004, /* Exception re-raised by 'finally' */
739 WHY_RETURN = 0x0008, /* 'return' statement */
740 WHY_BREAK = 0x0010, /* 'break' statement */
741 WHY_CONTINUE = 0x0020, /* 'continue' statement */
742 WHY_YIELD = 0x0040, /* 'yield' operator */
743 WHY_SILENCED = 0x0080 /* Exception silenced by 'with' */
744};
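/* The codes are distinct bits rather than sequential values, so the unwind
   logic can test for several reasons with a single mask. A sketch of the
   shape such a test takes later in the loop (simplified, not verbatim): */
#if 0
if (why & (WHY_RETURN | WHY_CONTINUE))
    retval = POP();             /* both carry a value on the stack */
#endif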
745
746static enum why_code do_raise(PyObject *, PyObject *);
747static int unpack_iterable(PyObject *, int, int, PyObject **);
748
749/* Records whether tracing is on for any thread. Counts the number of
750 threads for which tstate->c_tracefunc is non-NULL, so if the value
751 is 0, we know we don't have to check this thread's c_tracefunc.
752 This speeds up the if statement in PyEval_EvalFrameEx() after
753 fast_next_opcode*/
754static int _Py_TracingPossible = 0;
755
756
757
758PyObject *
759PyEval_EvalCode(PyObject *co, PyObject *globals, PyObject *locals)
760{
761 return PyEval_EvalCodeEx(co,
762 globals, locals,
763 (PyObject **)NULL, 0,
764 (PyObject **)NULL, 0,
765 (PyObject **)NULL, 0,
766 NULL, NULL);
767}
768
769
770/* Interpreter main loop */
771
772PyObject *
773PyEval_EvalFrame(PyFrameObject *f) {
774 /* This is for backward compatibility with extension modules that
775 used this API; core interpreter code should call
776 PyEval_EvalFrameEx() */
777 return PyEval_EvalFrameEx(f, 0);
778}
779
780PyObject *
781PyEval_EvalFrameEx(PyFrameObject *f, int throwflag)
782{
783#ifdef DXPAIRS
784 int lastopcode = 0;
785#endif
786 register PyObject **stack_pointer; /* Next free slot in value stack */
787 register unsigned char *next_instr;
788 register int opcode; /* Current opcode */
789 register int oparg; /* Current opcode argument, if any */
790 register enum why_code why; /* Reason for block stack unwind */
791 register int err; /* Error status -- nonzero if error */
792 register PyObject *x; /* Result object -- NULL if error */
793 register PyObject *v; /* Temporary objects popped off stack */
794 register PyObject *w;
795 register PyObject *u;
796 register PyObject *t;
797 register PyObject **fastlocals, **freevars;
798 PyObject *retval = NULL; /* Return value */
799 PyThreadState *tstate = PyThreadState_GET();
800 PyCodeObject *co;
801
802 /* when tracing we set things up so that
803
804 not (instr_lb <= current_bytecode_offset < instr_ub)
805
806 is true when the line being executed has changed. The
807 initial values are such as to make this false the first
808 time it is tested. */
809 int instr_ub = -1, instr_lb = 0, instr_prev = -1;
810
811 unsigned char *first_instr;
812 PyObject *names;
813 PyObject *consts;
814#if defined(Py_DEBUG) || defined(LLTRACE)
815 /* Make it easier to find out where we are with a debugger */
816 char *filename;
817#endif
818
819/* Computed GOTOs, or
820 the-optimization-commonly-but-improperly-known-as-"threaded code"
821 using gcc's labels-as-values extension
822 (http://gcc.gnu.org/onlinedocs/gcc/Labels-as-Values.html).
823
824 The traditional bytecode evaluation loop uses a "switch" statement, which
825 decent compilers will optimize as a single indirect branch instruction
826 combined with a lookup table of jump addresses. However, since the
827 indirect jump instruction is shared by all opcodes, the CPU will have a
828 hard time making the right prediction for where to jump next (actually,
829 it will be always wrong except in the uncommon case of a sequence of
830 several identical opcodes).
831
832 "Threaded code" in contrast, uses an explicit jump table and an explicit
833 indirect jump instruction at the end of each opcode. Since the jump
834 instruction is at a different address for each opcode, the CPU will make a
835 separate prediction for each of these instructions, which is equivalent to
836 predicting the second opcode of each opcode pair. These predictions have
837 a much better chance to turn out valid, especially in small bytecode loops.
838
839 A mispredicted branch on a modern CPU flushes the whole pipeline and
840 can cost several CPU cycles (depending on the pipeline depth),
841 and potentially many more instructions (depending on the pipeline width).
842 A correctly predicted branch, however, is nearly free.
843
844 At the time of this writing, the "threaded code" version is up to 15-20%
845 faster than the normal "switch" version, depending on the compiler and the
846 CPU architecture.
847
848 We disable the optimization if DYNAMIC_EXECUTION_PROFILE is defined,
849 because it would render the measurements invalid.
850
851
852 NOTE: care must be taken that the compiler doesn't try to "optimize" the
853 indirect jumps by sharing them between all opcodes. Such optimizations
854 can be disabled on gcc by using the -fno-gcse flag (or possibly
855 -fno-crossjumping).
856*/
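/* A self-contained toy (not from ceval.c) showing the labels-as-values
   dispatch described above, for a two-opcode "instruction set"
   (0 = INC, 1 = HALT). GCC/Clang only. */
#if 0
static int
run(const unsigned char *code)
{
    static void *targets[] = { &&op_inc, &&op_halt };
    int acc = 0;
    const unsigned char *pc = code;
    goto *targets[*pc++];           /* initial dispatch */
op_inc:
    acc++;
    goto *targets[*pc++];           /* per-opcode indirect jump: one
                                       branch-prediction slot per opcode */
op_halt:
    return acc;
}
#endif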
857
858#ifdef DYNAMIC_EXECUTION_PROFILE
859#undef USE_COMPUTED_GOTOS
860#define USE_COMPUTED_GOTOS 0
861#endif
862
863#ifdef HAVE_COMPUTED_GOTOS
864 #ifndef USE_COMPUTED_GOTOS
865 #define USE_COMPUTED_GOTOS 1
866 #endif
867#else
868 #if defined(USE_COMPUTED_GOTOS) && USE_COMPUTED_GOTOS
869 #error "Computed gotos are not supported on this compiler."
870 #endif
871 #undef USE_COMPUTED_GOTOS
872 #define USE_COMPUTED_GOTOS 0
873#endif
874
875#if USE_COMPUTED_GOTOS
876/* Import the static jump table */
877#include "opcode_targets.h"
878
879/* This macro is used when several opcodes defer to the same implementation
880 (e.g. SETUP_LOOP, SETUP_FINALLY) */
881#define TARGET_WITH_IMPL(op, impl) \
882 TARGET_##op: \
883 opcode = op; \
884 if (HAS_ARG(op)) \
885 oparg = NEXTARG(); \
886 case op: \
887 goto impl; \
888
889#define TARGET(op) \
890 TARGET_##op: \
891 opcode = op; \
892 if (HAS_ARG(op)) \
893 oparg = NEXTARG(); \
894 case op:
895
896
897#define DISPATCH() \
898 { \
899 if (!_Py_atomic_load_relaxed(&eval_breaker)) { \
900 FAST_DISPATCH(); \
901 } \
902 continue; \
903 }
904
905#ifdef LLTRACE
906#define FAST_DISPATCH() \
907 { \
908 if (!lltrace && !_Py_TracingPossible) { \
909 f->f_lasti = INSTR_OFFSET(); \
910 goto *opcode_targets[*next_instr++]; \
911 } \
912 goto fast_next_opcode; \
913 }
914#else
915#define FAST_DISPATCH() \
916 { \
917 if (!_Py_TracingPossible) { \
918 f->f_lasti = INSTR_OFFSET(); \
919 goto *opcode_targets[*next_instr++]; \
920 } \
921 goto fast_next_opcode; \
922 }
923#endif
924
925#else
926#define TARGET(op) \
927 case op:
928#define TARGET_WITH_IMPL(op, impl) \
929 /* silence compiler warnings about `impl` unused */ \
930 if (0) goto impl; \
931 case op:
932#define DISPATCH() continue
933#define FAST_DISPATCH() goto fast_next_opcode
934#endif
935
936
937/* Tuple access macros */
938
939#ifndef Py_DEBUG
940#define GETITEM(v, i) PyTuple_GET_ITEM((PyTupleObject *)(v), (i))
941#else
942#define GETITEM(v, i) PyTuple_GetItem((v), (i))
943#endif
944
945#ifdef WITH_TSC
946/* Use Pentium timestamp counter to mark certain events:
947 inst0 -- beginning of switch statement for opcode dispatch
948 inst1 -- end of switch statement (may be skipped)
949 loop0 -- the top of the mainloop
950 loop1 -- place where control returns again to top of mainloop
951 (may be skipped)
952 intr0 -- beginning of long interruption
953 intr1 -- end of long interruption
954
955 Many opcodes call out to helper C functions. In some cases, the
956 time in those functions should be counted towards the time for the
957 opcode, but not in all cases. For example, a CALL_FUNCTION opcode
958 calls another Python function; there's no point in charging all the
959 bytecode executed by the called function to the caller.
960
961 It's hard to make a useful judgement statically. In the presence
962 of operator overloading, it's impossible to tell if a call will
963 execute new Python code or not.
964
965 It's a case-by-case judgement. I'll use intr1 for the following
966 cases:
967
968 IMPORT_STAR
969 IMPORT_FROM
970 CALL_FUNCTION (and friends)
971
972 */
973 uint64 inst0, inst1, loop0, loop1, intr0 = 0, intr1 = 0;
974 int ticked = 0;
975
976 READ_TIMESTAMP(inst0);
977 READ_TIMESTAMP(inst1);
978 READ_TIMESTAMP(loop0);
979 READ_TIMESTAMP(loop1);
980
981 /* shut up the compiler */
982 opcode = 0;
983#endif
984
985/* Code access macros */
986
987#define INSTR_OFFSET() ((int)(next_instr - first_instr))
988#define NEXTOP() (*next_instr++)
989#define NEXTARG() (next_instr += 2, (next_instr[-1]<<8) + next_instr[-2])
990#define PEEKARG() ((next_instr[2]<<8) + next_instr[1])
991#define JUMPTO(x) (next_instr = first_instr + (x))
992#define JUMPBY(x) (next_instr += (x))
993
994/* OpCode prediction macros
995 Some opcodes tend to come in pairs thus making it possible to
996 predict the second code when the first is run. For example,
997 COMPARE_OP is often followed by JUMP_IF_FALSE or JUMP_IF_TRUE. And,
998 those opcodes are often followed by a POP_TOP.
999
1000 Verifying the prediction costs a single high-speed test of a register
1001 variable against a constant. If the pairing was good, then the
1002 processor's own internal branch prediction has a high likelihood of
1003 success, resulting in a nearly zero-overhead transition to the
1004 next opcode. A successful prediction saves a trip through the eval-loop
1005 including its two unpredictable branches, the HAS_ARG test and the
1006 switch-case. Combined with the processor's internal branch prediction,
1007 a successful PREDICT has the effect of making the two opcodes run as if
1008 they were a single new opcode with the bodies combined.
1009
1010 If collecting opcode statistics, your choices are to either keep the
1011 predictions turned-on and interpret the results as if some opcodes
1012 had been combined or turn-off predictions so that the opcode frequency
1013 counter updates for both opcodes.
1014
1015 Opcode prediction is disabled with threaded code, since the latter allows
1016 the CPU to record separate branch prediction information for each
1017 opcode.
1018
1019*/
1020
1021#if defined(DYNAMIC_EXECUTION_PROFILE) || USE_COMPUTED_GOTOS
1022#define PREDICT(op) if (0) goto PRED_##op
1023#define PREDICTED(op) PRED_##op:
1024#define PREDICTED_WITH_ARG(op) PRED_##op:
1025#else
1026#define PREDICT(op) if (*next_instr == op) goto PRED_##op
1027#define PREDICTED(op) PRED_##op: next_instr++
1028#define PREDICTED_WITH_ARG(op) PRED_##op: oparg = PEEKARG(); next_instr += 3
1029#endif
1030
1031
1032/* Stack manipulation macros */
1033
1034/* The stack can grow at most MAXINT deep, as co_nlocals and
1035 co_stacksize are ints. */
1036#define STACK_LEVEL() ((int)(stack_pointer - f->f_valuestack))
1037#define EMPTY() (STACK_LEVEL() == 0)
1038#define TOP() (stack_pointer[-1])
1039#define SECOND() (stack_pointer[-2])
1040#define THIRD() (stack_pointer[-3])
1041#define FOURTH() (stack_pointer[-4])
1042#define PEEK(n) (stack_pointer[-(n)])
1043#define SET_TOP(v) (stack_pointer[-1] = (v))
1044#define SET_SECOND(v) (stack_pointer[-2] = (v))
1045#define SET_THIRD(v) (stack_pointer[-3] = (v))
1046#define SET_FOURTH(v) (stack_pointer[-4] = (v))
1047#define SET_VALUE(n, v) (stack_pointer[-(n)] = (v))
1048#define BASIC_STACKADJ(n) (stack_pointer += n)
1049#define BASIC_PUSH(v) (*stack_pointer++ = (v))
1050#define BASIC_POP() (*--stack_pointer)
1051
1052#ifdef LLTRACE
1053#define PUSH(v) { (void)(BASIC_PUSH(v), \
1054 lltrace && prtrace(TOP(), "push")); \
1055 assert(STACK_LEVEL() <= co->co_stacksize); }
1056#define POP() ((void)(lltrace && prtrace(TOP(), "pop")), \
1057 BASIC_POP())
1058#define STACKADJ(n) { (void)(BASIC_STACKADJ(n), \
1059 lltrace && prtrace(TOP(), "stackadj")); \
1060 assert(STACK_LEVEL() <= co->co_stacksize); }
1061#define EXT_POP(STACK_POINTER) ((void)(lltrace && \
1062 prtrace((STACK_POINTER)[-1], "ext_pop")), \
1063 *--(STACK_POINTER))
1064#else
1065#define PUSH(v) BASIC_PUSH(v)
1066#define POP() BASIC_POP()
1067#define STACKADJ(n) BASIC_STACKADJ(n)
1068#define EXT_POP(STACK_POINTER) (*--(STACK_POINTER))
1069#endif
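/* How a typical binary opcode uses these macros (sketch modeled on the
   BINARY_ADD pattern, not its verbatim body): pop the right operand and
   overwrite the left operand in place, for a net stack effect of -1. */
#if 0
w = POP();                      /* right operand */
v = TOP();                      /* left operand stays in its slot */
x = PyNumber_Add(v, w);
Py_DECREF(v);
Py_DECREF(w);
SET_TOP(x);                     /* result replaces the left operand */
if (x == NULL)
    break;                      /* x == NULL routes to the error handler */
#endif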
1070
1071/* Local variable macros */
1072
1073#define GETLOCAL(i) (fastlocals[i])
1074
1075/* The SETLOCAL() macro must not DECREF the local variable in-place and
1076 then store the new value; it must copy the old value to a temporary
1077 value, then store the new value, and then DECREF the temporary value.
1078 This is because it is possible that during the DECREF the frame is
1079 accessed by other code (e.g. a __del__ method or gc.collect()) and the
1080 variable would be pointing to already-freed memory. */
1081#define SETLOCAL(i, value) do { PyObject *tmp = GETLOCAL(i); \
1082 GETLOCAL(i) = value; \
1083 Py_XDECREF(tmp); } while (0)
1084
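/* Why the temporary matters (sketch, not part of ceval.c): a DECREF can
   run arbitrary destructor code that reads this frame, so the slot must
   never point at freed memory. */
#if 0
/* UNSAFE: if the DECREF triggers a __del__ that inspects the frame,
   fastlocals[i] briefly dangles. */
Py_XDECREF(fastlocals[i]);
fastlocals[i] = value;

/* SAFE (what SETLOCAL does): publish the new value first. */
{
    PyObject *tmp = fastlocals[i];
    fastlocals[i] = value;
    Py_XDECREF(tmp);
}
#endif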
1085
1086#define UNWIND_BLOCK(b) \
1087 while (STACK_LEVEL() > (b)->b_level) { \
1088 PyObject *v = POP(); \
1089 Py_XDECREF(v); \
1090 }
1091
1092#define UNWIND_EXCEPT_HANDLER(b) \
1093 { \
1094 PyObject *type, *value, *traceback; \
1095 assert(STACK_LEVEL() >= (b)->b_level + 3); \
1096 while (STACK_LEVEL() > (b)->b_level + 3) { \
1097 value = POP(); \
1098 Py_XDECREF(value); \
1099 } \
1100 type = tstate->exc_type; \
1101 value = tstate->exc_value; \
1102 traceback = tstate->exc_traceback; \
1103 tstate->exc_type = POP(); \
1104 tstate->exc_value = POP(); \
1105 tstate->exc_traceback = POP(); \
1106 Py_XDECREF(type); \
1107 Py_XDECREF(value); \
1108 Py_XDECREF(traceback); \
1109 }
1110
1111#define SAVE_EXC_STATE() \
1112 { \
1113 PyObject *type, *value, *traceback; \
1114 Py_XINCREF(tstate->exc_type); \
1115 Py_XINCREF(tstate->exc_value); \
1116 Py_XINCREF(tstate->exc_traceback); \
1117 type = f->f_exc_type; \
1118 value = f->f_exc_value; \
1119 traceback = f->f_exc_traceback; \
1120 f->f_exc_type = tstate->exc_type; \
1121 f->f_exc_value = tstate->exc_value; \
1122 f->f_exc_traceback = tstate->exc_traceback; \
1123 Py_XDECREF(type); \
1124 Py_XDECREF(value); \
1125 Py_XDECREF(traceback); \
1126 }
1127
1128#define SWAP_EXC_STATE() \
1129 { \
1130 PyObject *tmp; \
1131 tmp = tstate->exc_type; \
1132 tstate->exc_type = f->f_exc_type; \
1133 f->f_exc_type = tmp; \
1134 tmp = tstate->exc_value; \
1135 tstate->exc_value = f->f_exc_value; \
1136 f->f_exc_value = tmp; \
1137 tmp = tstate->exc_traceback; \
1138 tstate->exc_traceback = f->f_exc_traceback; \
1139 f->f_exc_traceback = tmp; \
1140 }
1141
1142/* Start of code */
1143
1144 if (f == NULL)
1145 return NULL;
1146
1147 /* push frame */
1148 if (Py_EnterRecursiveCall(""))
1149 return NULL;
1150
1151 tstate->frame = f;
1152
1153 if (tstate->use_tracing) {
1154 if (tstate->c_tracefunc != NULL) {
1155 /* tstate->c_tracefunc, if defined, is a
1156 function that will be called on *every* entry
1157 to a code block. Its return value, if not
1158 None, is a function that will be called at
1159 the start of each executed line of code.
1160 (Actually, the function must return itself
1161 in order to continue tracing.) The trace
1162 functions are called with three arguments:
1163 a pointer to the current frame, a string
1164 indicating why the function is called, and
1165 an argument which depends on the situation.
1166 The global trace function is also called
1167 whenever an exception is detected. */
1168 if (call_trace_protected(tstate->c_tracefunc,
1169 tstate->c_traceobj,
1170 f, PyTrace_CALL, Py_None)) {
1171 /* Trace function raised an error */
1172 goto exit_eval_frame;
1173 }
1174 }
1175 if (tstate->c_profilefunc != NULL) {
1176 /* Similar for c_profilefunc, except it needn't
1177 return itself and isn't called for "line" events */
1178 if (call_trace_protected(tstate->c_profilefunc,
1179 tstate->c_profileobj,
1180 f, PyTrace_CALL, Py_None)) {
1181 /* Profile function raised an error */
1182 goto exit_eval_frame;
1183 }
1184 }
1185 }
1186
1187 co = f->f_code;
1188 names = co->co_names;
1189 consts = co->co_consts;
1190 fastlocals = f->f_localsplus;
1191 freevars = f->f_localsplus + co->co_nlocals;
1192 first_instr = (unsigned char*) PyBytes_AS_STRING(co->co_code);
1193 /* An explanation is in order for the next line.
1194
1195 f->f_lasti now refers to the index of the last instruction
1196 executed. You might think this was obvious from the name, but
1197 this wasn't always true before 2.3! PyFrame_New now sets
1198 f->f_lasti to -1 (i.e. the index *before* the first instruction)
1199 and YIELD_VALUE doesn't fiddle with f_lasti any more. So this
1200 does work. Promise.
1201
1202 When the PREDICT() macros are enabled, some opcode pairs follow in
1203 direct succession without updating f->f_lasti. A successful
1204 prediction effectively links the two codes together as if they
1205 were a single new opcode; accordingly, f->f_lasti will point to
1206 the first code in the pair (for instance, GET_ITER followed by
1207 FOR_ITER is effectively a single opcode and f->f_lasti will point
1208 to the beginning of the combined pair.)
1209 */
1210 next_instr = first_instr + f->f_lasti + 1;
1211 stack_pointer = f->f_stacktop;
1212 assert(stack_pointer != NULL);
1213 f->f_stacktop = NULL; /* remains NULL unless yield suspends frame */
1214
1215 if (co->co_flags & CO_GENERATOR && !throwflag) {
1216 if (f->f_exc_type != NULL && f->f_exc_type != Py_None) {
1217 /* We were in an except handler when we left,
1218 restore the exception state which was put aside
1219 (see YIELD_VALUE). */
1220 SWAP_EXC_STATE();
1221 }
1222 else {
1223 SAVE_EXC_STATE();
1224 }
1225 }
1226
1227#ifdef LLTRACE
1228 lltrace = PyDict_GetItemString(f->f_globals, "__lltrace__") != NULL;
1229#endif
1230#if defined(Py_DEBUG) || defined(LLTRACE)
1231 {
1232 PyObject *error_type, *error_value, *error_traceback;
1233 PyErr_Fetch(&error_type, &error_value, &error_traceback);
1234 filename = _PyUnicode_AsString(co->co_filename);
1235 if (filename == NULL && tstate->overflowed) {
1236 /* maximum recursion depth exceeded */
1237 goto exit_eval_frame;
1238 }
1239 PyErr_Restore(error_type, error_value, error_traceback);
1240 }
1241#endif
1242
1243 why = WHY_NOT;
1244 err = 0;
1245 x = Py_None; /* Not a reference, just anything non-NULL */
1246 w = NULL;
1247
1248 if (throwflag) { /* support for generator.throw() */
1249 why = WHY_EXCEPTION;
1250 goto on_error;
1251 }
1252
1253 for (;;) {
1254#ifdef WITH_TSC
1255 if (inst1 == 0) {
1256 /* Almost surely, the opcode executed a break
1257 or a continue, preventing inst1 from being set
1258 on the way out of the loop.
1259 */
1260 READ_TIMESTAMP(inst1);
1261 loop1 = inst1;
1262 }
1263 dump_tsc(opcode, ticked, inst0, inst1, loop0, loop1,
1264 intr0, intr1);
1265 ticked = 0;
1266 inst1 = 0;
1267 intr0 = 0;
1268 intr1 = 0;
1269 READ_TIMESTAMP(loop0);
1270#endif
1271 assert(stack_pointer >= f->f_valuestack); /* else underflow */
1272 assert(STACK_LEVEL() <= co->co_stacksize); /* else overflow */
1273
1274 /* Do periodic things. Doing this every time through
1275 the loop would add too much overhead, so we do it
1276 only every Nth instruction. We also do it if
1277 ``pendingcalls_to_do'' is set, i.e. when an asynchronous
1278 event needs attention (e.g. a signal handler or
1279 async I/O handler); see Py_AddPendingCall() and
1280 Py_MakePendingCalls() above. */
1281
1282 if (_Py_atomic_load_relaxed(&eval_breaker)) {
1283 if (*next_instr == SETUP_FINALLY) {
1284 /* Make the last opcode before
1285 a try: finally: block uninterruptible. */
1286 goto fast_next_opcode;
1287 }
1288 tstate->tick_counter++;
1289#ifdef WITH_TSC
1290 ticked = 1;
1291#endif
1292 if (_Py_atomic_load_relaxed(&pendingcalls_to_do)) {
1293 if (Py_MakePendingCalls() < 0) {
1294 why = WHY_EXCEPTION;
1295 goto on_error;
1296 }
1297 }
1298#ifdef WITH_THREAD
1299 if (_Py_atomic_load_relaxed(&gil_drop_request)) {
1300 /* Give another thread a chance */
1301 if (PyThreadState_Swap(NULL) != tstate)
1302 Py_FatalError("ceval: tstate mix-up");
1303 drop_gil(tstate);
1304
1305 /* Other threads may run now */
1306
1307 take_gil(tstate);
1308 if (PyThreadState_Swap(tstate) != NULL)
1309 Py_FatalError("ceval: orphan tstate");
1310 }
1311#endif
1312 /* Check for asynchronous exceptions. */
1313 if (tstate->async_exc != NULL) {
1314 x = tstate->async_exc;
1315 tstate->async_exc = NULL;
1316 UNSIGNAL_ASYNC_EXC();
1317 PyErr_SetNone(x);
1318 Py_DECREF(x);
1319 why = WHY_EXCEPTION;
1320 goto on_error;
1321 }
1322 }
1323
1324 fast_next_opcode:
1325 f->f_lasti = INSTR_OFFSET();
1326
1327 /* line-by-line tracing support */
1328
1329 if (_Py_TracingPossible &&
1330 tstate->c_tracefunc != NULL && !tstate->tracing) {
1331 /* see maybe_call_line_trace
1332 for expository comments */
1333 f->f_stacktop = stack_pointer;
1334
1335 err = maybe_call_line_trace(tstate->c_tracefunc,
1336 tstate->c_traceobj,
1337 f, &instr_lb, &instr_ub,
1338 &instr_prev);
1339 /* Reload possibly changed frame fields */
1340            JUMPTO(f->f_lasti);
1341            if (f->f_stacktop != NULL) {
1342                stack_pointer = f->f_stacktop;
1343                f->f_stacktop = NULL;
1344 }
1345 if (err) {
1346 /* trace function raised an exception */
1347 goto on_error;
1348 }
1349 }
1350
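        /* [Editor's note] The line-trace callback may move f->f_lasti
           (for example a debugger jump command), so next_instr is
           recomputed with JUMPTO() and the value stack pointer is
           restored from the f_stacktop slot that was saved above. */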
1351 /* Extract opcode and argument */
1352
1353        opcode = NEXTOP();
1354        oparg = 0;   /* allows oparg to be stored in a register because
1355            it doesn't have to be remembered across a full loop */
1356        if (HAS_ARG(opcode))
1357            oparg = NEXTARG();
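        /* [Editor's note] NEXTOP() fetches one opcode byte; NEXTARG(),
           whose expansion is (next_instr += 2, (next_instr[-1]<<8) +
           next_instr[-2]), fetches a 16-bit little-endian argument.
           For example the byte stream 64 01 00 decodes as opcode 100
           (LOAD_CONST) with oparg 1. */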
1358 dispatch_opcode:
1359#ifdef DYNAMIC_EXECUTION_PROFILE
1360#ifdef DXPAIRS
1361 dxpairs[lastopcode][opcode]++;
1362 lastopcode = opcode;
1363#endif
1364 dxp[opcode]++;
1365#endif
1366
1367#ifdef LLTRACE
1368 /* Instruction tracing */
1369
1370 if (lltrace) {
1371            if (HAS_ARG(opcode)) {
1372 printf("%d: %d, %d\n",
1373 f->f_lasti, opcode, oparg);
1374 }
1375 else {
1376 printf("%d: %d\n",
1377 f->f_lasti, opcode);
1378 }
1379 }
1380#endif
1381
1382 /* Main switch on opcode */
1383 READ_TIMESTAMP(inst0);
1384
1385 switch (opcode) {
1386
1387 /* BEWARE!
1388 It is essential that any operation that fails sets either
1389 x to NULL, err to nonzero, or why to anything but WHY_NOT,
1390 and that no operation that succeeds does this! */
1391
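        /* [Editor's note] This is the interpreter's error protocol: after
           the switch, the cleanup code inspects x, err and why.  Every
           handler below therefore either DISPATCHes on success or falls
           through to 'break' leaving x == NULL, err != 0, or why set to
           something other than WHY_NOT to report the failure. */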
1392 /* case STOP_CODE: this is an error! */
1393
1394        TARGET(NOP)
1395            FAST_DISPATCH();
1396
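        /* [Editor's note] The two dispatch macros differ in what they
           check.  FAST_DISPATCH() jumps straight to the next handler
           through the computed-goto table opcode_targets[] (unless
           tracing is possible), while DISPATCH() first tests eval_breaker
           and falls back to the top of the loop when it is set, so that
           GIL drop requests, pending calls and async exceptions are
           serviced between instructions. */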
1397        TARGET(LOAD_FAST)
1398            x = GETLOCAL(oparg);
1399            if (x != NULL) {
1400                Py_INCREF(x);
1401                PUSH(x);
1402                FAST_DISPATCH();
1403            }
1404            format_exc_check_arg(PyExc_UnboundLocalError,
1405                                 UNBOUNDLOCAL_ERROR_MSG,
1406                                 PyTuple_GetItem(co->co_varnames, oparg));
1407 break;
1408
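        /* [Editor's note] GETLOCAL(oparg) is a direct index into the
           frame's fastlocals array, so reading a local is one array load
           plus an INCREF.  A NULL slot means the name was never bound or
           has been deleted, which produces the UnboundLocalError
           formatted above. */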
1409        TARGET(LOAD_CONST)
1410            x = GETITEM(consts, oparg);
1411            Py_INCREF(x);
1412            PUSH(x);
1413            FAST_DISPATCH();
1414
1415        PREDICTED_WITH_ARG(STORE_FAST);
1416        TARGET(STORE_FAST)
1417            v = POP();
1418            SETLOCAL(oparg, v);
1419            FAST_DISPATCH();
1420
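        /* [Editor's note] SETLOCAL() stores the new value before
           releasing the old one: if the old value's deallocator runs
           arbitrary code, it must never observe a half-updated
           fastlocals slot. */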
1421        TARGET(POP_TOP)
1422            v = POP();
1423            Py_DECREF(v);
1424            FAST_DISPATCH();
1425
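        /* [Editor's note] The value stack grows upward from
           f->f_valuestack: TOP() is stack_pointer[-1], SECOND() is
           stack_pointer[-2], PUSH() writes *stack_pointer++ (asserting
           in debug builds that the depth stays within co->co_stacksize),
           and POP() reads *--stack_pointer. */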
1426        TARGET(ROT_TWO)
1427            v = TOP();
1428            w = SECOND();
1429            SET_TOP(w);
1430            SET_SECOND(v);
1431            FAST_DISPATCH();
1432
1433        TARGET(ROT_THREE)
1434            v = TOP();
1435            w = SECOND();
1436            x = THIRD();
1437            SET_TOP(w);
1438            SET_SECOND(x);
1439            SET_THIRD(v);
1440            FAST_DISPATCH();
1441
1442        TARGET(DUP_TOP)
1443            v = TOP();
1444            Py_INCREF(v);
1445            PUSH(v);
1446            FAST_DISPATCH();
1447
1448        TARGET(DUP_TOP_TWO)
1449            x = TOP();
1450            Py_INCREF(x);
1451            w = SECOND();
1452            Py_INCREF(w);
1453            STACKADJ(2);
1454            SET_TOP(x);
1455            SET_SECOND(w);
1456            FAST_DISPATCH();
1457
1458        TARGET(UNARY_POSITIVE)
1459            v = TOP();
1460            x = PyNumber_Positive(v);
1461            Py_DECREF(v);
1462            SET_TOP(x);
1463            if (x != NULL) DISPATCH();
1464 break;
1465
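        /* [Editor's note] The shape of UNARY_POSITIVE repeats for the
           arithmetic opcodes that follow: fetch the operands, call the
           abstract-object API, DECREF what was consumed, overwrite the
           stack top with the result, then DISPATCH() on success or break
           with x == NULL so the common error path takes over. */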
1466        TARGET(UNARY_NEGATIVE)
1467            v = TOP();
1468            x = PyNumber_Negative(v);
1469            Py_DECREF(v);
1470            SET_TOP(x);
1471            if (x != NULL) DISPATCH();
1472 break;
1473
1474        TARGET(UNARY_NOT)
1475            v = TOP();
1476            err = PyObject_IsTrue(v);
1477            Py_DECREF(v);
1478            if (err == 0) {
1479                Py_INCREF(Py_True);
1480                SET_TOP(Py_True);
1481                DISPATCH();
1482            }
1483            else if (err > 0) {
1484                Py_INCREF(Py_False);
1485                SET_TOP(Py_False);
1486                err = 0;
1487                DISPATCH();
1488            }
1489            STACKADJ(-1);
1490 break;
1491
1492        TARGET(UNARY_INVERT)
1493            v = TOP();
1494            x = PyNumber_Invert(v);
1495            Py_DECREF(v);
1496            SET_TOP(x);
1497            if (x != NULL) DISPATCH();
1498 break;
1499
1500        TARGET(BINARY_POWER)
1501            w = POP();
1502            v = TOP();
1503            x = PyNumber_Power(v, w, Py_None);
1504            Py_DECREF(v);
1505            Py_DECREF(w);
1506            SET_TOP(x);
1507            if (x != NULL) DISPATCH();
1508 break;
1509
1510        TARGET(BINARY_MULTIPLY)
1511            w = POP();
1512            v = TOP();
1513            x = PyNumber_Multiply(v, w);
1514            Py_DECREF(v);
1515            Py_DECREF(w);
1516            SET_TOP(x);
1517            if (x != NULL) DISPATCH();
1518 break;
1519
1520        TARGET(BINARY_TRUE_DIVIDE)
1521            w = POP();
1522            v = TOP();
1523            x = PyNumber_TrueDivide(v, w);
1524            Py_DECREF(v);
1525            Py_DECREF(w);
1526            SET_TOP(x);
1527            if (x != NULL) DISPATCH();
1528 break;
1529
1530        TARGET(BINARY_FLOOR_DIVIDE)
1531            w = POP();
1532            v = TOP();
1533            x = PyNumber_FloorDivide(v, w);
1534            Py_DECREF(v);
1535            Py_DECREF(w);
1536            SET_TOP(x);
1537            if (x != NULL) DISPATCH();
1538 break;
1539
1540        TARGET(BINARY_MODULO)
1541            w = POP();
1542            v = TOP();
1543            if (PyUnicode_CheckExact(v))
1544                x = PyUnicode_Format(v, w);
1545            else
1546                x = PyNumber_Remainder(v, w);
1547            Py_DECREF(v);
1548            Py_DECREF(w);
1549            SET_TOP(x);
1550            if (x != NULL) DISPATCH();
1551 break;
1552
1553        TARGET(BINARY_ADD)
1554            w = POP();
1555            v = TOP();
1556            if (PyUnicode_CheckExact(v) &&
1557                PyUnicode_CheckExact(w)) {
1558                x = unicode_concatenate(v, w, f, next_instr);
1559                /* unicode_concatenate consumed the ref to v */
1560                goto skip_decref_vx;
1561            }
1562            else {
1563                x = PyNumber_Add(v, w);
1564            }
1565            Py_DECREF(v);
1566          skip_decref_vx:
1567            Py_DECREF(w);
1568            SET_TOP(x);
1569            if (x != NULL) DISPATCH();
1570 break;
1571
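        /* [Editor's note] unicode_concatenate() is the fast path for
           str + str.  It steals the reference to v (hence the
           skip_decref_vx label) and is passed next_instr so it can peek
           at the following store instruction; when the result is about
           to be stored back into the same variable, the left operand can
           be resized in place, avoiding quadratic behaviour in repeated
           concatenation. */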
1572        TARGET(BINARY_SUBTRACT)
1573            w = POP();
1574            v = TOP();
1575            x = PyNumber_Subtract(v, w);
1576            Py_DECREF(v);
1577            Py_DECREF(w);
1578            SET_TOP(x);
1579            if (x != NULL) DISPATCH();
1580 break;
1581
1582        TARGET(BINARY_SUBSCR)
1583            w = POP();
1584            v = TOP();
1585            x = PyObject_GetItem(v, w);
1586            Py_DECREF(v);
1587            Py_DECREF(w);
1588            SET_TOP(x);
1589            if (x != NULL) DISPATCH();
1590 break;
1591
1592        TARGET(BINARY_LSHIFT)
1593            w = POP();
1594            v = TOP();
1595            x = PyNumber_Lshift(v, w);
1596            Py_DECREF(v);
1597            Py_DECREF(w);
1598            SET_TOP(x);
1599            if (x != NULL) DISPATCH();
1600 break;
1601
1602        TARGET(BINARY_RSHIFT)
1603            w = POP();
1604            v = TOP();
1605            x = PyNumber_Rshift(v, w);
1606            Py_DECREF(v);
1607            Py_DECREF(w);
1608            SET_TOP(x);
1609            if (x != NULL) DISPATCH();
1610 break;
1611
1612        TARGET(BINARY_AND)
1613            w = POP();
1614            v = TOP();
1615            x = PyNumber_And(v, w);
1616            Py_DECREF(v);
1617            Py_DECREF(w);
1618            SET_TOP(x);
1619            if (x != NULL) DISPATCH();
1620 break;
1621
1622        TARGET(BINARY_XOR)
1623            w = POP();
1624            v = TOP();
1625            x = PyNumber_Xor(v, w);
1626            Py_DECREF(v);
1627            Py_DECREF(w);
1628            SET_TOP(x);
1629            if (x != NULL) DISPATCH();
1630 break;
1631
1632        TARGET(BINARY_OR)
1633            w = POP();
1634            v = TOP();
1635            x = PyNumber_Or(v, w);
1636            Py_DECREF(v);
1637            Py_DECREF(w);
1638            SET_TOP(x);
1639            if (x != NULL) DISPATCH();
1640 break;
1641
1642        TARGET(LIST_APPEND)
1643            w = POP();
1644            v = PEEK(oparg);
1645            err = PyList_Append(v, w);
1646            Py_DECREF(w);
1647            if (err == 0) {
1648                PREDICT(JUMP_ABSOLUTE);
1649                DISPATCH();
1650 }
1651 break;
1652
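        /* [Editor's note] LIST_APPEND is emitted inside list
           comprehensions, where it is almost always followed by
           JUMP_ABSOLUTE; PREDICT() exists to shortcut that pairing.
           In this build the prediction is compiled out (the macro
           expanded to "if (0) goto PRED_JUMP_ABSOLUTE;") because with
           computed-goto dispatch the hardware branch predictor already
           does that job. */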
1653        TARGET(SET_ADD)
1654            w = POP();
1655            v = stack_pointer[-oparg];
1656            err = PySet_Add(v, w);
1657            Py_DECREF(w);
1658            if (err == 0) {
1659                PREDICT(JUMP_ABSOLUTE);
1660                DISPATCH();
1661 }
1662 break;
1663
1664        TARGET(INPLACE_POWER)
1665            w = POP();
1666            v = TOP();
1667            x = PyNumber_InPlacePower(v, w, Py_None);
1668            Py_DECREF(v);
1669            Py_DECREF(w);
1670            SET_TOP(x);
1671            if (x != NULL) DISPATCH();
1672 break;
1673
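        /* [Editor's note] The INPLACE_* opcodes mirror their BINARY_*
           counterparts but call the PyNumber_InPlace* entry points,
           which try the type's in-place slot first and fall back to the
           ordinary binary operation when the object does not support
           mutation; that is why the stack discipline here is identical. */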
1674        TARGET(INPLACE_MULTIPLY)
1675            w = POP();
1676            v = TOP();
1677            x = PyNumber_InPlaceMultiply(v, w);
1678            Py_DECREF(v);
1679            Py_DECREF(w);
1680            SET_TOP(x);
1681            if (x != NULL) DISPATCH();
1682 break;
1683
1684        TARGET(INPLACE_TRUE_DIVIDE)
1685            w = POP();
1686            v = TOP();
1687            x = PyNumber_InPlaceTrueDivide(v, w);
1688            Py_DECREF(v);
1689            Py_DECREF(w);
1690            SET_TOP(x);
1691            if (x != NULL) DISPATCH();
1692 break;
1693
1694        TARGET(INPLACE_FLOOR_DIVIDE)
1695            w = POP();
1696            v = TOP();
1697            x = PyNumber_InPlaceFloorDivide(v, w);
1698            Py_DECREF(v);
1699            Py_DECREF(w);
1700            SET_TOP(x);
1701            if (x != NULL) DISPATCH();
1702 break;
1703
1704        TARGET(INPLACE_MODULO)
1705            w = POP();
1706            v = TOP();
1707            x = PyNumber_InPlaceRemainder(v, w);
1708            Py_DECREF(v);
1709            Py_DECREF(w);
1710            SET_TOP(x);
1711            if (x != NULL) DISPATCH();
1712 break;
1713
1714        TARGET(INPLACE_ADD)
1715            w = POP();
1716            v = TOP();
1717            if (PyUnicode_CheckExact(v) &&
1718                PyUnicode_CheckExact(w)) {
1719                x = unicode_concatenate(v, w, f, next_instr);
1720                /* unicode_concatenate consumed the ref to v */
1721                goto skip_decref_v;
1722            }
1723            else {
1724                x = PyNumber_InPlaceAdd(v, w);
1725            }
1726            Py_DECREF(v);
1727          skip_decref_v:
1728            Py_DECREF(w);
1729            SET_TOP(x);
1730            if (x != NULL) DISPATCH();
1731 break;
1732
1733        TARGET(INPLACE_SUBTRACT)
1734            w = POP();
1735            v = TOP();
1736            x = PyNumber_InPlaceSubtract(v, w);
1737            Py_DECREF(v);
1738            Py_DECREF(w);
1739            SET_TOP(x);
1740            if (x != NULL) DISPATCH();
1741 break;
1742
1743        TARGET(INPLACE_LSHIFT)
1744            w = POP();
1745            v = TOP();
1746            x = PyNumber_InPlaceLshift(v, w);
1747            Py_DECREF(v);
1748            Py_DECREF(w);
1749            SET_TOP(x);
1750            if (x != NULL) DISPATCH();
1751 break;
1752
1753        TARGET(INPLACE_RSHIFT)
1754            w = POP();
1755            v = TOP();
1756            x = PyNumber_InPlaceRshift(v, w);
1757            Py_DECREF(v);
1758            Py_DECREF(w);
1759            SET_TOP(x);
1760            if (x != NULL) DISPATCH();
1761 break;
1762
1763 TARGET(INPLACE_AND)TARGET_INPLACE_AND: opcode = 77; if (((77) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 77:
1764 w = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
1765 v = TOP()(stack_pointer[-1]);
1766 x = PyNumber_InPlaceAnd(v, w);
1767 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 1767, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
1768 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 1768, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
1769 SET_TOP(x)(stack_pointer[-1] = (x));
1770 if (x != NULL((void*)0)) DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
1771 break;
1772
1773 TARGET(INPLACE_XOR)TARGET_INPLACE_XOR: opcode = 78; if (((78) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 78:
1774 w = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
1775 v = TOP()(stack_pointer[-1]);
1776 x = PyNumber_InPlaceXor(v, w);
1777 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 1777, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
1778 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 1778, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
1779 SET_TOP(x)(stack_pointer[-1] = (x));
1780 if (x != NULL((void*)0)) DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
1781 break;
1782
1783 TARGET(INPLACE_OR)TARGET_INPLACE_OR: opcode = 79; if (((79) >= 90)) oparg = (
next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]);
case 79:
1784 w = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
1785 v = TOP()(stack_pointer[-1]);
1786 x = PyNumber_InPlaceOr(v, w);
1787 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 1787, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
1788 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 1788, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
1789 SET_TOP(x)(stack_pointer[-1] = (x));
1790 if (x != NULL((void*)0)) DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
1791 break;
1792
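
The five in-place handlers above (INPLACE_LSHIFT through INPLACE_OR) differ only in which PyNumber_InPlace* call they make. With the lltrace and refcount-debugging expansions folded back into their macro names (all names as in the surrounding code), each body reduces to roughly this sketch:

    w = POP();                          /* right operand */
    v = TOP();                          /* left operand */
    x = PyNumber_InPlaceLshift(v, w);   /* or Rshift/And/Xor/Or */
    Py_DECREF(v);
    Py_DECREF(w);
    SET_TOP(x);                        /* result replaces v on the stack */
    if (x != NULL) DISPATCH();         /* NULL means an exception is pending */
    break;                             /* error path: handled after the switch */
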
1793 TARGET(STORE_SUBSCR)TARGET_STORE_SUBSCR: opcode = 60; if (((60) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 60:
1794 w = TOP()(stack_pointer[-1]);
1795 v = SECOND()(stack_pointer[-2]);
1796 u = THIRD()(stack_pointer[-3]);
1797 STACKADJ(-3){ (void)((stack_pointer += -3), lltrace && prtrace((stack_pointer
[-1]), "stackadj")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 1797, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
1798 /* v[w] = u */
1799 err = PyObject_SetItem(v, w, u);
1800 Py_DECREF(u)do { if (_Py_RefTotal-- , --((PyObject*)(u))->ob_refcnt !=
0) { if (((PyObject*)u)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 1800, (PyObject *)(u)); } else _Py_Dealloc
((PyObject *)(u)); } while (0)
;
1801 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 1801, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
1802 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 1802, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
1803 if (err == 0) DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
1804 break;
1805
1806 TARGET(DELETE_SUBSCR)TARGET_DELETE_SUBSCR: opcode = 61; if (((61) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 61:
1807 w = TOP()(stack_pointer[-1]);
1808 v = SECOND()(stack_pointer[-2]);
1809 STACKADJ(-2){ (void)((stack_pointer += -2), lltrace && prtrace((stack_pointer
[-1]), "stackadj")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 1809, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
1810 /* del v[w] */
1811 err = PyObject_DelItem(v, w);
1812 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 1812, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
1813 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 1813, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
1814 if (err == 0) DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
1815 break;
1816
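
STORE_SUBSCR and DELETE_SUBSCR above follow the same shape with three- and two-element stack effects. Reduced to their macro names, the two bodies are roughly:

    /* STORE_SUBSCR: TOP = key, SECOND = container, THIRD = value */
    w = TOP(); v = SECOND(); u = THIRD();
    STACKADJ(-3);
    err = PyObject_SetItem(v, w, u);    /* v[w] = u */
    Py_DECREF(u); Py_DECREF(v); Py_DECREF(w);
    if (err == 0) DISPATCH();

    /* DELETE_SUBSCR: TOP = key, SECOND = container */
    w = TOP(); v = SECOND();
    STACKADJ(-2);
    err = PyObject_DelItem(v, w);       /* del v[w] */
    Py_DECREF(v); Py_DECREF(w);
    if (err == 0) DISPATCH();
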
1817 TARGET(PRINT_EXPR)TARGET_PRINT_EXPR: opcode = 70; if (((70) >= 90)) oparg = (
next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]);
case 70:
1818 v = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
1819 w = PySys_GetObject("displayhook");
1820 if (w == NULL((void*)0)) {
1821 PyErr_SetString(PyExc_RuntimeError,
1822 "lost sys.displayhook");
1823 err = -1;
1824 x = NULL((void*)0);
1825 }
1826 if (err == 0) {
1827 x = PyTuple_Pack(1, v);
1828 if (x == NULL((void*)0))
1829 err = -1;
1830 }
1831 if (err == 0) {
1832 w = PyEval_CallObject(w, x)PyEval_CallObjectWithKeywords(w, x, (PyObject *)((void*)0));
1833 Py_XDECREF(w)do { if ((w) == ((void*)0)) ; else do { if (_Py_RefTotal-- , --
((PyObject*)(w))->ob_refcnt != 0) { if (((PyObject*)w)->
ob_refcnt < 0) _Py_NegativeRefcount("Python/ceval.c", 1833
, (PyObject *)(w)); } else _Py_Dealloc((PyObject *)(w)); } while
(0); } while (0)
;
1834 if (w == NULL((void*)0))
1835 err = -1;
1836 }
1837 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 1837, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
1838 Py_XDECREF(x)do { if ((x) == ((void*)0)) ; else do { if (_Py_RefTotal-- , --
((PyObject*)(x))->ob_refcnt != 0) { if (((PyObject*)x)->
ob_refcnt < 0) _Py_NegativeRefcount("Python/ceval.c", 1838
, (PyObject *)(x)); } else _Py_Dealloc((PyObject *)(x)); } while
(0); } while (0)
;
1839 break;
1840
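
PRINT_EXPR implements the interactive echo of an expression statement: the value is handed to sys.displayhook, which is looked up afresh on every use so that user code can replace it. A reduced sketch of the call sequence (the err bookkeeping is elided here):

    v = POP();                            /* the expression's value */
    w = PySys_GetObject("displayhook");   /* borrowed reference; NULL -> RuntimeError */
    x = PyTuple_Pack(1, v);               /* one-element argument tuple */
    w = PyEval_CallObject(w, x);          /* displayhook(value) */
    Py_XDECREF(w);
    Py_DECREF(v);
    Py_XDECREF(x);
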
1841#ifdef CASE_TOO_BIG
1842 default: switch (opcode) {
1843#endif
1844 TARGET(RAISE_VARARGS)TARGET_RAISE_VARARGS: opcode = 130; if (((130) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 130:
1845 v = w = NULL((void*)0);
1846 switch (oparg) {
1847 case 2:
1848 v = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
; /* cause */
1849 case 1:
1850 w = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
; /* exc */
1851 case 0: /* Fallthrough */
1852 why = do_raise(w, v);
1853 break;
1854 default:
1855 PyErr_SetString(PyExc_SystemError,
1856 "bad RAISE_VARARGS oparg");
1857 why = WHY_EXCEPTION;
1858 break;
1859 }
1860 break;
1861
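
The switch in RAISE_VARARGS above relies on deliberate fallthrough: oparg == 2 pops the cause and then the exception, oparg == 1 pops only the exception, and oparg == 0 re-raises with both still NULL; all three paths funnel into the same do_raise() call, whose why-code result drives the unwinding machinery:

    v = w = NULL;
    switch (oparg) {
    case 2: v = POP();              /* cause */   /* falls through */
    case 1: w = POP();              /* exc */     /* falls through */
    case 0: why = do_raise(w, v);   break;
    default: /* SystemError "bad RAISE_VARARGS oparg"; why = WHY_EXCEPTION */
        break;
    }
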
1862 TARGET(STORE_LOCALS)TARGET_STORE_LOCALS: opcode = 69; if (((69) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 69:
1863 x = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
1864 v = f->f_locals;
1865 Py_XDECREF(v)do { if ((v) == ((void*)0)) ; else do { if (_Py_RefTotal-- , --
((PyObject*)(v))->ob_refcnt != 0) { if (((PyObject*)v)->
ob_refcnt < 0) _Py_NegativeRefcount("Python/ceval.c", 1865
, (PyObject *)(v)); } else _Py_Dealloc((PyObject *)(v)); } while
(0); } while (0)
;
1866 f->f_locals = x;
1867 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
1868
1869 TARGET(RETURN_VALUE)TARGET_RETURN_VALUE: opcode = 83; if (((83) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 83:
1870 retval = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
1871 why = WHY_RETURN;
1872 goto fast_block_end;
1873
1874 TARGET(YIELD_VALUE)TARGET_YIELD_VALUE: opcode = 86; if (((86) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 86:
1875 retval = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
1876 f->f_stacktop = stack_pointer;
1877 why = WHY_YIELD;
1878 /* Put aside the current exception state and restore
1879 that of the calling frame. This matters only when
1880 "yield" is used inside an except handler. */
1881 SWAP_EXC_STATE(){ PyObject *tmp; tmp = tstate->exc_type; tstate->exc_type
= f->f_exc_type; f->f_exc_type = tmp; tmp = tstate->
exc_value; tstate->exc_value = f->f_exc_value; f->f_exc_value
= tmp; tmp = tstate->exc_traceback; tstate->exc_traceback
= f->f_exc_traceback; f->f_exc_traceback = tmp; }
;
1882 goto fast_yield;
1883
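
The SWAP_EXC_STATE() in YIELD_VALUE above exchanges the thread's exception state with the copy saved in the frame, so an exception being handled around the "yield" stays with the generator instead of leaking into the caller. Folded out of its expansion, the macro is three pointer swaps:

    PyObject *tmp;
    tmp = tstate->exc_type;      tstate->exc_type      = f->f_exc_type;      f->f_exc_type      = tmp;
    tmp = tstate->exc_value;     tstate->exc_value     = f->f_exc_value;     f->f_exc_value     = tmp;
    tmp = tstate->exc_traceback; tstate->exc_traceback = f->f_exc_traceback; f->f_exc_traceback = tmp;
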
1884 TARGET(POP_EXCEPT)TARGET_POP_EXCEPT: opcode = 89; if (((89) >= 90)) oparg = (
next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]);
case 89:
1885 {
1886 PyTryBlock *b = PyFrame_BlockPop(f);
1887 if (b->b_type != EXCEPT_HANDLER257) {
1888 PyErr_SetString(PyExc_SystemError,
1889 "popped block is not an except handler");
1890 why = WHY_EXCEPTION;
1891 break;
1892 }
1893 UNWIND_EXCEPT_HANDLER(b){ PyObject *type, *value, *traceback; (__builtin_expect(!(((int
)(stack_pointer - f->f_valuestack)) >= (b)->b_level +
3), 0) ? __assert_rtn(__func__, "Python/ceval.c", 1893, "STACK_LEVEL() >= (b)->b_level + 3"
) : (void)0); while (((int)(stack_pointer - f->f_valuestack
)) > (b)->b_level + 3) { value = ((void)(lltrace &&
prtrace((stack_pointer[-1]), "pop")), (*--stack_pointer)); do
{ if ((value) == ((void*)0)) ; else do { if (_Py_RefTotal-- ,
--((PyObject*)(value))->ob_refcnt != 0) { if (((PyObject*
)value)->ob_refcnt < 0) _Py_NegativeRefcount("Python/ceval.c"
, 1893, (PyObject *)(value)); } else _Py_Dealloc((PyObject *)
(value)); } while (0); } while (0); } type = tstate->exc_type
; value = tstate->exc_value; traceback = tstate->exc_traceback
; tstate->exc_type = ((void)(lltrace && prtrace((stack_pointer
[-1]), "pop")), (*--stack_pointer)); tstate->exc_value = (
(void)(lltrace && prtrace((stack_pointer[-1]), "pop")
), (*--stack_pointer)); tstate->exc_traceback = ((void)(lltrace
&& prtrace((stack_pointer[-1]), "pop")), (*--stack_pointer
)); do { if ((type) == ((void*)0)) ; else do { if (_Py_RefTotal
-- , --((PyObject*)(type))->ob_refcnt != 0) { if (((PyObject
*)type)->ob_refcnt < 0) _Py_NegativeRefcount("Python/ceval.c"
, 1893, (PyObject *)(type)); } else _Py_Dealloc((PyObject *)(
type)); } while (0); } while (0); do { if ((value) == ((void*
)0)) ; else do { if (_Py_RefTotal-- , --((PyObject*)(value))->
ob_refcnt != 0) { if (((PyObject*)value)->ob_refcnt < 0
) _Py_NegativeRefcount("Python/ceval.c", 1893, (PyObject *)(value
)); } else _Py_Dealloc((PyObject *)(value)); } while (0); } while
(0); do { if ((traceback) == ((void*)0)) ; else do { if (_Py_RefTotal
-- , --((PyObject*)(traceback))->ob_refcnt != 0) { if (((PyObject
*)traceback)->ob_refcnt < 0) _Py_NegativeRefcount("Python/ceval.c"
, 1893, (PyObject *)(traceback)); } else _Py_Dealloc((PyObject
*)(traceback)); } while (0); } while (0); }
;
1894 }
1895 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
1896
1897 TARGET(POP_BLOCK)TARGET_POP_BLOCK: opcode = 87; if (((87) >= 90)) oparg = (
next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]);
case 87:
1898 {
1899 PyTryBlock *b = PyFrame_BlockPop(f);
1900 UNWIND_BLOCK(b)while (((int)(stack_pointer - f->f_valuestack)) > (b)->
b_level) { PyObject *v = ((void)(lltrace && prtrace((
stack_pointer[-1]), "pop")), (*--stack_pointer)); do { if ((v
) == ((void*)0)) ; else do { if (_Py_RefTotal-- , --((PyObject
*)(v))->ob_refcnt != 0) { if (((PyObject*)v)->ob_refcnt
< 0) _Py_NegativeRefcount("Python/ceval.c", 1900, (PyObject
*)(v)); } else _Py_Dealloc((PyObject *)(v)); } while (0); } while
(0); }
;
1901 }
1902 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
1903
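
POP_EXCEPT and POP_BLOCK above differ only in how much the popped block unwinds. Reduced from their expansions:

    /* UNWIND_BLOCK(b): pop values down to the block's saved stack level */
    while (STACK_LEVEL() > (b)->b_level)
        Py_XDECREF(POP());

    /* UNWIND_EXCEPT_HANDLER(b): the handler left the previous exc_type,
       exc_value and exc_traceback as the three deepest entries above
       b_level; pop everything above them, then pop those three back
       into tstate, releasing the tstate->exc_* references they replace */
    while (STACK_LEVEL() > (b)->b_level + 3)
        Py_XDECREF(POP());
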
1904 PREDICTED(END_FINALLY)PRED_END_FINALLY:;
1905 TARGET(END_FINALLY)TARGET_END_FINALLY: opcode = 88; if (((88) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 88:
1906 v = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
1907 if (PyLong_Check(v)((((((PyObject*)(v))->ob_type))->tp_flags & ((1L<<
24))) != 0)
) {
1908 why = (enum why_code) PyLong_AS_LONG(v)PyLong_AsLong(v);
1909 assert(why != WHY_YIELD)(__builtin_expect(!(why != WHY_YIELD), 0) ? __assert_rtn(__func__
, "Python/ceval.c", 1909, "why != WHY_YIELD") : (void)0)
;
1910 if (why == WHY_RETURN ||
1911 why == WHY_CONTINUE)
1912 retval = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
1913 if (why == WHY_SILENCED) {
1914 /* An exception was silenced by 'with'; we must
1915 manually unwind the EXCEPT_HANDLER block that was
1916 created when the exception was caught, otherwise
1917 the stack would be left in an inconsistent state. */
1918 PyTryBlock *b = PyFrame_BlockPop(f);
1919 assert(b->b_type == EXCEPT_HANDLER)(__builtin_expect(!(b->b_type == 257), 0) ? __assert_rtn(__func__
, "Python/ceval.c", 1919, "b->b_type == EXCEPT_HANDLER") :
(void)0)
;
1920 UNWIND_EXCEPT_HANDLER(b){ PyObject *type, *value, *traceback; (__builtin_expect(!(((int
)(stack_pointer - f->f_valuestack)) >= (b)->b_level +
3), 0) ? __assert_rtn(__func__, "Python/ceval.c", 1920, "STACK_LEVEL() >= (b)->b_level + 3"
) : (void)0); while (((int)(stack_pointer - f->f_valuestack
)) > (b)->b_level + 3) { value = ((void)(lltrace &&
prtrace((stack_pointer[-1]), "pop")), (*--stack_pointer)); do
{ if ((value) == ((void*)0)) ; else do { if (_Py_RefTotal-- ,
--((PyObject*)(value))->ob_refcnt != 0) { if (((PyObject*
)value)->ob_refcnt < 0) _Py_NegativeRefcount("Python/ceval.c"
, 1920, (PyObject *)(value)); } else _Py_Dealloc((PyObject *)
(value)); } while (0); } while (0); } type = tstate->exc_type
; value = tstate->exc_value; traceback = tstate->exc_traceback
; tstate->exc_type = ((void)(lltrace && prtrace((stack_pointer
[-1]), "pop")), (*--stack_pointer)); tstate->exc_value = (
(void)(lltrace && prtrace((stack_pointer[-1]), "pop")
), (*--stack_pointer)); tstate->exc_traceback = ((void)(lltrace
&& prtrace((stack_pointer[-1]), "pop")), (*--stack_pointer
)); do { if ((type) == ((void*)0)) ; else do { if (_Py_RefTotal
-- , --((PyObject*)(type))->ob_refcnt != 0) { if (((PyObject
*)type)->ob_refcnt < 0) _Py_NegativeRefcount("Python/ceval.c"
, 1920, (PyObject *)(type)); } else _Py_Dealloc((PyObject *)(
type)); } while (0); } while (0); do { if ((value) == ((void*
)0)) ; else do { if (_Py_RefTotal-- , --((PyObject*)(value))->
ob_refcnt != 0) { if (((PyObject*)value)->ob_refcnt < 0
) _Py_NegativeRefcount("Python/ceval.c", 1920, (PyObject *)(value
)); } else _Py_Dealloc((PyObject *)(value)); } while (0); } while
(0); do { if ((traceback) == ((void*)0)) ; else do { if (_Py_RefTotal
-- , --((PyObject*)(traceback))->ob_refcnt != 0) { if (((PyObject
*)traceback)->ob_refcnt < 0) _Py_NegativeRefcount("Python/ceval.c"
, 1920, (PyObject *)(traceback)); } else _Py_Dealloc((PyObject
*)(traceback)); } while (0); } while (0); }
;
1921 why = WHY_NOT;
1922 }
1923 }
1924 else if (PyExceptionClass_Check(v)(((((((PyObject*)((v)))->ob_type))->tp_flags & ((1L
<<31))) != 0) && ((((PyTypeObject*)(v))->tp_flags
& ((1L<<30))) != 0))
) {
1925 w = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
1926 u = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
1927 PyErr_Restore(v, w, u);
1928 why = WHY_RERAISE;
1929 break;
1930 }
1931 else if (v != Py_None(&_Py_NoneStruct)) {
1932 PyErr_SetString(PyExc_SystemError,
1933 "'finally' pops bad exception");
1934 why = WHY_EXCEPTION;
1935 }
1936 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 1936, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
1937 break;
1938
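
END_FINALLY above decodes the marker that the block-unwinding machinery leaves on the stack when control enters a finally suite: a PyLong encodes a pending why-code (with the return/continue value stored beneath it), an exception class means a saved exception to re-raise, and None means nothing was pending. Reduced:

    v = POP();
    if (PyLong_Check(v)) {
        why = (enum why_code) PyLong_AS_LONG(v);
        if (why == WHY_RETURN || why == WHY_CONTINUE)
            retval = POP();                 /* value carried under the code */
        if (why == WHY_SILENCED) {          /* 'with' swallowed the exception */
            PyTryBlock *b = PyFrame_BlockPop(f);
            UNWIND_EXCEPT_HANDLER(b);
            why = WHY_NOT;
        }
    }
    else if (PyExceptionClass_Check(v)) {   /* re-raise: type, value, traceback */
        w = POP(); u = POP();
        PyErr_Restore(v, w, u);
        why = WHY_RERAISE;
    }
    else if (v != Py_None) { /* SystemError: "'finally' pops bad exception" */ }
    Py_DECREF(v);
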
1939 TARGET(LOAD_BUILD_CLASS)TARGET_LOAD_BUILD_CLASS: opcode = 71; if (((71) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 71:
1940 x = PyDict_GetItemString(f->f_builtins,
1941 "__build_class__");
1942 if (x == NULL((void*)0)) {
1943 PyErr_SetString(PyExc_ImportError,
1944 "__build_class__ not found");
1945 break;
1946 }
1947 Py_INCREF(x)( _Py_RefTotal++ , ((PyObject*)(x))->ob_refcnt++);
1948 PUSH(x){ (void)((*stack_pointer++ = (x)), lltrace && prtrace
((stack_pointer[-1]), "push")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 1948, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
1949 break;
1950
1951 TARGET(STORE_NAME)TARGET_STORE_NAME: opcode = 90; if (((90) >= 90)) oparg = (
next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]);
case 90:
1952 w = GETITEM(names, oparg)PyTuple_GetItem((names), (oparg));
1953 v = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
1954 if ((x = f->f_locals) != NULL((void*)0)) {
1955 if (PyDict_CheckExact(x)((((PyObject*)(x))->ob_type) == &PyDict_Type))
1956 err = PyDict_SetItem(x, w, v);
1957 else
1958 err = PyObject_SetItem(x, w, v);
1959 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 1959, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
1960 if (err == 0) DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
1961 break;
1962 }
1963 PyErr_Format(PyExc_SystemError,
1964 "no locals found when storing %R", w);
1965 break;
1966
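
STORE_NAME above special-cases the common situation where f_locals is exactly a dict; otherwise it goes through the generic PyObject_SetItem protocol, since f_locals may be an arbitrary mapping. Reduced (the NULL-f_locals SystemError path is elided in this sketch):

    w = GETITEM(names, oparg);          /* the name, from co_names */
    v = POP();                          /* the value */
    x = f->f_locals;
    err = PyDict_CheckExact(x) ? PyDict_SetItem(x, w, v)
                               : PyObject_SetItem(x, w, v);
    Py_DECREF(v);
    if (err == 0) DISPATCH();
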
1967 TARGET(DELETE_NAME)TARGET_DELETE_NAME: opcode = 91; if (((91) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 91:
1968 w = GETITEM(names, oparg)PyTuple_GetItem((names), (oparg));
1969 if ((x = f->f_locals) != NULL((void*)0)) {
1970 if ((err = PyObject_DelItem(x, w)) != 0)
1971 format_exc_check_arg(PyExc_NameError,
1972 NAME_ERROR_MSG"name '%.200s' is not defined",
1973 w);
1974 break;
1975 }
1976 PyErr_Format(PyExc_SystemError,
1977 "no locals when deleting %R", w);
1978 break;
1979
1980 PREDICTED_WITH_ARG(UNPACK_SEQUENCE)PRED_UNPACK_SEQUENCE:;
1981 TARGET(UNPACK_SEQUENCE)TARGET_UNPACK_SEQUENCE: opcode = 92; if (((92) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 92:
1982 v = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
1983 if (PyTuple_CheckExact(v)((((PyObject*)(v))->ob_type) == &PyTuple_Type) &&
1984 PyTuple_GET_SIZE(v)(((PyVarObject*)(v))->ob_size) == oparg) {
1985 PyObject **items = \
1986 ((PyTupleObject *)v)->ob_item;
1987 while (oparg--) {
1988 w = items[oparg];
1989 Py_INCREF(w)( _Py_RefTotal++ , ((PyObject*)(w))->ob_refcnt++);
1990 PUSH(w){ (void)((*stack_pointer++ = (w)), lltrace && prtrace
((stack_pointer[-1]), "push")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 1990, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
1991 }
1992 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 1992, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
1993 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
1994 } else if (PyList_CheckExact(v)((((PyObject*)(v))->ob_type) == &PyList_Type) &&
1995 PyList_GET_SIZE(v)(((PyVarObject*)(v))->ob_size) == oparg) {
1996 PyObject **items = \
1997 ((PyListObject *)v)->ob_item;
1998 while (oparg--) {
1999 w = items[oparg];
2000 Py_INCREF(w)( _Py_RefTotal++ , ((PyObject*)(w))->ob_refcnt++);
2001 PUSH(w){ (void)((*stack_pointer++ = (w)), lltrace && prtrace
((stack_pointer[-1]), "push")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2001, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2002 }
2003 } else if (unpack_iterable(v, oparg, -1,
2004 stack_pointer + oparg)) {
2005 STACKADJ(oparg){ (void)((stack_pointer += oparg), lltrace && prtrace
((stack_pointer[-1]), "stackadj")); (__builtin_expect(!(((int
)(stack_pointer - f->f_valuestack)) <= co->co_stacksize
), 0) ? __assert_rtn(__func__, "Python/ceval.c", 2005, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2006 } else {
2007 /* unpack_iterable() raised an exception */
2008 why = WHY_EXCEPTION;
2009 }
2010 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2010, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
2011 break;
2012
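
UNPACK_SEQUENCE above has three paths: an exact tuple of exactly oparg items is pushed directly in reverse order, an exact list likewise, and everything else goes through the general unpack_iterable() helper. The tuple fast path, reduced:

    if (PyTuple_CheckExact(v) && PyTuple_GET_SIZE(v) == oparg) {
        PyObject **items = ((PyTupleObject *)v)->ob_item;
        while (oparg--) {
            w = items[oparg];           /* last item first, so the first */
            Py_INCREF(w);               /* unpacking target ends on top  */
            PUSH(w);
        }
        Py_DECREF(v);
        DISPATCH();
    }
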
2013 TARGET(UNPACK_EX)TARGET_UNPACK_EX: opcode = 94; if (((94) >= 90)) oparg = (
next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]);
case 94:
2014 {
2015 int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8);
2016 v = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2017
2018 if (unpack_iterable(v, oparg & 0xFF, oparg >> 8,
2019 stack_pointer + totalargs)) {
2020 stack_pointer += totalargs;
2021 } else {
2022 why = WHY_EXCEPTION;
2023 }
2024 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2024, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
2025 break;
2026 }
2027
2028 TARGET(STORE_ATTR)TARGET_STORE_ATTR: opcode = 95; if (((95) >= 90)) oparg = (
next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]);
case 95:
2029 w = GETITEM(names, oparg)PyTuple_GetItem((names), (oparg));
2030 v = TOP()(stack_pointer[-1]);
2031 u = SECOND()(stack_pointer[-2]);
2032 STACKADJ(-2){ (void)((stack_pointer += -2), lltrace && prtrace((stack_pointer
[-1]), "stackadj")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2032, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2033 err = PyObject_SetAttr(v, w, u); /* v.w = u */
2034 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2034, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
2035 Py_DECREF(u)do { if (_Py_RefTotal-- , --((PyObject*)(u))->ob_refcnt !=
0) { if (((PyObject*)u)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2035, (PyObject *)(u)); } else _Py_Dealloc
((PyObject *)(u)); } while (0)
;
2036 if (err == 0) DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2037 break;
2038
2039 TARGET(DELETE_ATTR)TARGET_DELETE_ATTR: opcode = 96; if (((96) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 96:
2040 w = GETITEM(names, oparg)PyTuple_GetItem((names), (oparg));
2041 v = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2042 err = PyObject_SetAttr(v, w, (PyObject *)NULL((void*)0));
2043 /* del v.w */
2044 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2044, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
2045 break;
2046
2047 TARGET(STORE_GLOBAL)TARGET_STORE_GLOBAL: opcode = 97; if (((97) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 97:
2048 w = GETITEM(names, oparg)PyTuple_GetItem((names), (oparg));
2049 v = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2050 err = PyDict_SetItem(f->f_globals, w, v);
2051 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2051, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
2052 if (err == 0) DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2053 break;
2054
2055 TARGET(DELETE_GLOBAL)TARGET_DELETE_GLOBAL: opcode = 98; if (((98) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 98:
2056 w = GETITEM(names, oparg)PyTuple_GetItem((names), (oparg));
2057 if ((err = PyDict_DelItem(f->f_globals, w)) != 0)
2058 format_exc_check_arg(
2059 PyExc_NameError, GLOBAL_NAME_ERROR_MSG"global name '%.200s' is not defined", w);
2060 break;
2061
2062 TARGET(LOAD_NAME)TARGET_LOAD_NAME: opcode = 101; if (((101) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 101:
2063 w = GETITEM(names, oparg)PyTuple_GetItem((names), (oparg));
2064 if ((v = f->f_locals) == NULL((void*)0)) {
2065 PyErr_Format(PyExc_SystemError,
2066 "no locals when loading %R", w);
2067 why = WHY_EXCEPTION;
2068 break;
2069 }
2070 if (PyDict_CheckExact(v)((((PyObject*)(v))->ob_type) == &PyDict_Type)) {
2071 x = PyDict_GetItem(v, w);
2072 Py_XINCREF(x)do { if ((x) == ((void*)0)) ; else ( _Py_RefTotal++ , ((PyObject
*)(x))->ob_refcnt++); } while (0)
;
2073 }
2074 else {
2075 x = PyObject_GetItem(v, w);
2076 if (x == NULL((void*)0) && PyErr_Occurred()) {
2077 if (!PyErr_ExceptionMatches(
2078 PyExc_KeyError))
2079 break;
2080 PyErr_Clear();
2081 }
2082 }
2083 if (x == NULL((void*)0)) {
2084 x = PyDict_GetItem(f->f_globals, w);
2085 if (x == NULL((void*)0)) {
2086 x = PyDict_GetItem(f->f_builtins, w);
2087 if (x == NULL((void*)0)) {
2088 format_exc_check_arg(
2089 PyExc_NameError,
2090 NAME_ERROR_MSG"name '%.200s' is not defined", w);
2091 break;
2092 }
2093 }
2094 Py_INCREF(x)( _Py_RefTotal++ , ((PyObject*)(x))->ob_refcnt++);
2095 }
2096 PUSH(x){ (void)((*stack_pointer++ = (x)), lltrace && prtrace
((stack_pointer[-1]), "push")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2096, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2097 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2098
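
LOAD_NAME above searches three scopes in order: f_locals (which may be a non-dict mapping, hence the PyObject_GetItem branch with its KeyError suppression), then f_globals, then f_builtins, raising NameError only when all three miss. Reduced, with the borrowed-vs-new reference bookkeeping elided:

    v = f->f_locals;                    /* NULL -> SystemError */
    x = PyDict_CheckExact(v) ? PyDict_GetItem(v, w)
                             : PyObject_GetItem(v, w);
    if (x == NULL) {
        x = PyDict_GetItem(f->f_globals, w);
        if (x == NULL)
            x = PyDict_GetItem(f->f_builtins, w);   /* still NULL -> NameError */
    }
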
2099 TARGET(LOAD_GLOBAL)TARGET_LOAD_GLOBAL: opcode = 116; if (((116) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 116:
2100 w = GETITEM(names, oparg)PyTuple_GetItem((names), (oparg));
2101 if (PyUnicode_CheckExact(w)((((PyObject*)(w))->ob_type) == &PyUnicode_Type)) {
2102 /* Inline the PyDict_GetItem() calls.
2103 WARNING: this is an extreme speed hack.
2104 Do not try this at home. */
2105 Py_hash_t hash = ((PyUnicodeObject *)w)->hash;
2106 if (hash != -1) {
2107 PyDictObject *d;
2108 PyDictEntry *e;
2109 d = (PyDictObject *)(f->f_globals);
2110 e = d->ma_lookup(d, w, hash);
2111 if (e == NULL((void*)0)) {
2112 x = NULL((void*)0);
2113 break;
2114 }
2115 x = e->me_value;
2116 if (x != NULL((void*)0)) {
2117 Py_INCREF(x)( _Py_RefTotal++ , ((PyObject*)(x))->ob_refcnt++);
2118 PUSH(x){ (void)((*stack_pointer++ = (x)), lltrace && prtrace
((stack_pointer[-1]), "push")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2118, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2119 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2120 }
2121 d = (PyDictObject *)(f->f_builtins);
2122 e = d->ma_lookup(d, w, hash);
2123 if (e == NULL((void*)0)) {
2124 x = NULL((void*)0);
2125 break;
2126 }
2127 x = e->me_value;
2128 if (x != NULL((void*)0)) {
2129 Py_INCREF(x)( _Py_RefTotal++ , ((PyObject*)(x))->ob_refcnt++);
2130 PUSH(x){ (void)((*stack_pointer++ = (x)), lltrace && prtrace
((stack_pointer[-1]), "push")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2130, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2131 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2132 }
2133 goto load_global_error;
2134 }
2135 }
2136 /* This is the un-inlined version of the code above */
2137 x = PyDict_GetItem(f->f_globals, w);
2138 if (x == NULL((void*)0)) {
2139 x = PyDict_GetItem(f->f_builtins, w);
2140 if (x == NULL((void*)0)) {
2141 load_global_error:
2142 format_exc_check_arg(
2143 PyExc_NameError,
2144 GLOBAL_NAME_ERROR_MSG"global name '%.200s' is not defined", w);
2145 break;
2146 }
2147 }
2148 Py_INCREF(x)( _Py_RefTotal++ , ((PyObject*)(x))->ob_refcnt++);
2149 PUSH(x){ (void)((*stack_pointer++ = (x)), lltrace && prtrace
((stack_pointer[-1]), "push")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2149, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2150 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2151
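
The "extreme speed hack" in LOAD_GLOBAL above skips PyDict_GetItem entirely when the name is an exact unicode object whose hash field is already filled in (it is cached the first time the string is hashed): it calls the dict's ma_lookup slot directly, probing f_globals and then f_builtins. Reduced, with the push/dispatch plumbing elided:

    Py_hash_t hash = ((PyUnicodeObject *)w)->hash;   /* -1 if not yet computed */
    if (hash != -1) {
        d = (PyDictObject *)(f->f_globals);
        e = d->ma_lookup(d, w, hash);                /* direct slot call */
        if (e != NULL && e->me_value != NULL)
            { /* found in globals: INCREF, PUSH, DISPATCH */ }
        /* otherwise the same probe on f->f_builtins, else load_global_error */
    }
    /* un-inlined fallback: PyDict_GetItem on f_globals, then f_builtins */
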
2152 TARGET(DELETE_FAST)TARGET_DELETE_FAST: opcode = 126; if (((126) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 126:
2153 x = GETLOCAL(oparg)(fastlocals[oparg]);
2154 if (x != NULL((void*)0)) {
2155 SETLOCAL(oparg, NULL)do { PyObject *tmp = (fastlocals[oparg]); (fastlocals[oparg])
= ((void*)0); do { if ((tmp) == ((void*)0)) ; else do { if (
_Py_RefTotal-- , --((PyObject*)(tmp))->ob_refcnt != 0) { if
(((PyObject*)tmp)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2155, (PyObject *)(tmp)); } else _Py_Dealloc
((PyObject *)(tmp)); } while (0); } while (0); } while (0)
;
2156 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2157 }
2158 format_exc_check_arg(
2159 PyExc_UnboundLocalError,
2160 UNBOUNDLOCAL_ERROR_MSG"local variable '%.200s' referenced before assignment",
2161 PyTuple_GetItem(co->co_varnames, oparg)
2162 );
2163 break;
2164
2165 TARGET(DELETE_DEREF)TARGET_DELETE_DEREF: opcode = 138; if (((138) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 138:
2166 x = freevars[oparg];
2167 if (PyCell_GET(x)(((PyCellObject *)(x))->ob_ref) != NULL((void*)0)) {
2168 PyCell_Set(x, NULL((void*)0));
2169 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2170 }
2171 err = -1;
2172 format_exc_unbound(co, oparg);
2173 break;
2174
2175 TARGET(LOAD_CLOSURE)TARGET_LOAD_CLOSURE: opcode = 135; if (((135) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 135:
2176 x = freevars[oparg];
2177 Py_INCREF(x)( _Py_RefTotal++ , ((PyObject*)(x))->ob_refcnt++);
2178 PUSH(x){ (void)((*stack_pointer++ = (x)), lltrace && prtrace
((stack_pointer[-1]), "push")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2178, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2179 if (x != NULL((void*)0)) DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2180 break;
2181
2182 TARGET(LOAD_DEREF)TARGET_LOAD_DEREF: opcode = 136; if (((136) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 136:
2183 x = freevars[oparg];
2184 w = PyCell_Get(x);
2185 if (w != NULL((void*)0)) {
2186 PUSH(w){ (void)((*stack_pointer++ = (w)), lltrace && prtrace
((stack_pointer[-1]), "push")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2186, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2187 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2188 }
2189 err = -1;
2190 format_exc_unbound(co, oparg);
2191 break;
2192
2193 TARGET(STORE_DEREF)TARGET_STORE_DEREF: opcode = 137; if (((137) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 137:
2194 w = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2195 x = freevars[oparg];
2196 PyCell_Set(x, w);
2197 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2197, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
2198 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2199
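
LOAD_CLOSURE, LOAD_DEREF and STORE_DEREF above all work through the cell objects held in freevars: LOAD_CLOSURE pushes the cell itself (to be packed into a function's closure), while the DEREF opcodes read and write its contents. Reduced:

    x = freevars[oparg];                  /* the PyCellObject */
    /* LOAD_CLOSURE */ Py_INCREF(x); PUSH(x);
    /* LOAD_DEREF   */ w = PyCell_Get(x); /* NULL -> format_exc_unbound() */
    /* STORE_DEREF  */ PyCell_Set(x, POP());
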
2200 TARGET(BUILD_TUPLE)TARGET_BUILD_TUPLE: opcode = 102; if (((102) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 102:
2201 x = PyTuple_New(oparg);
2202 if (x != NULL((void*)0)) {
2203 for (; --oparg >= 0;) {
2204 w = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2205 PyTuple_SET_ITEM(x, oparg, w)(((PyTupleObject *)(x))->ob_item[oparg] = w);
2206 }
2207 PUSH(x){ (void)((*stack_pointer++ = (x)), lltrace && prtrace
((stack_pointer[-1]), "push")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2207, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2208 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2209 }
2210 break;
2211
2212 TARGET(BUILD_LIST)TARGET_BUILD_LIST: opcode = 103; if (((103) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 103:
2213 x = PyList_New(oparg);
2214 if (x != NULL((void*)0)) {
2215 for (; --oparg >= 0;) {
2216 w = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2217 PyList_SET_ITEM(x, oparg, w)(((PyListObject *)(x))->ob_item[oparg] = (w));
2218 }
2219 PUSH(x){ (void)((*stack_pointer++ = (x)), lltrace && prtrace
((stack_pointer[-1]), "push")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2219, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2220 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2221 }
2222 break;
2223
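
BUILD_TUPLE and BUILD_LIST above fill the fresh container backwards, popping the last element first so that index oparg-1 receives the old top of stack; the SET_ITEM macros steal the popped references, so no further refcounting is needed. Reduced:

    x = PyTuple_New(oparg);                  /* or PyList_New(oparg) */
    if (x != NULL) {
        for (; --oparg >= 0;) {
            w = POP();
            PyTuple_SET_ITEM(x, oparg, w);   /* steals the reference to w */
        }
        PUSH(x);
        DISPATCH();
    }
    break;                                   /* allocation failed */

BUILD_SET, just below, cannot use that trick: PySet_Add does not steal references, so each popped item is DECREF'ed after insertion, and any failed add discards the half-built set.
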
2224 TARGET(BUILD_SET)TARGET_BUILD_SET: opcode = 104; if (((104) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 104:
2225 x = PySet_New(NULL((void*)0));
2226 if (x != NULL((void*)0)) {
2227 for (; --oparg >= 0;) {
2228 w = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2229 if (err == 0)
2230 err = PySet_Add(x, w);
2231 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2231, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
2232 }
2233 if (err != 0) {
2234 Py_DECREF(x)do { if (_Py_RefTotal-- , --((PyObject*)(x))->ob_refcnt !=
0) { if (((PyObject*)x)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2234, (PyObject *)(x)); } else _Py_Dealloc
((PyObject *)(x)); } while (0)
;
2235 break;
2236 }
2237 PUSH(x){ (void)((*stack_pointer++ = (x)), lltrace && prtrace
((stack_pointer[-1]), "push")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2237, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2238 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2239 }
2240 break;
2241
2242 TARGET(BUILD_MAP)TARGET_BUILD_MAP: opcode = 105; if (((105) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 105:
2243 x = _PyDict_NewPresized((Py_ssize_t)oparg);
2244 PUSH(x){ (void)((*stack_pointer++ = (x)), lltrace && prtrace
((stack_pointer[-1]), "push")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2244, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2245 if (x != NULL((void*)0)) DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2246 break;
2247
2248 TARGET(STORE_MAP)TARGET_STORE_MAP: opcode = 54; if (((54) >= 90)) oparg = (
next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]);
case 54:
2249 w = TOP()(stack_pointer[-1]); /* key */
2250 u = SECOND()(stack_pointer[-2]); /* value */
2251 v = THIRD()(stack_pointer[-3]); /* dict */
2252 STACKADJ(-2){ (void)((stack_pointer += -2), lltrace && prtrace((stack_pointer
[-1]), "stackadj")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2252, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2253 assert (PyDict_CheckExact(v))(__builtin_expect(!(((((PyObject*)(v))->ob_type) == &PyDict_Type
)), 0) ? __assert_rtn(__func__, "Python/ceval.c", 2253, "PyDict_CheckExact(v)"
) : (void)0)
;
2254 err = PyDict_SetItem(v, w, u); /* v[w] = u */
2255 Py_DECREF(u)do { if (_Py_RefTotal-- , --((PyObject*)(u))->ob_refcnt !=
0) { if (((PyObject*)u)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2255, (PyObject *)(u)); } else _Py_Dealloc
((PyObject *)(u)); } while (0)
;
2256 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2256, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
2257 if (err == 0) DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2258 break;
2259
2260 TARGET(MAP_ADD)TARGET_MAP_ADD: opcode = 147; if (((147) >= 90)) oparg = (
next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]);
case 147:
2261 w = TOP()(stack_pointer[-1]); /* key */
2262 u = SECOND()(stack_pointer[-2]); /* value */
2263 STACKADJ(-2){ (void)((stack_pointer += -2), lltrace && prtrace((stack_pointer
[-1]), "stackadj")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2263, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2264 v = stack_pointer[-oparg]; /* dict */
2265 assert (PyDict_CheckExact(v))(__builtin_expect(!(((((PyObject*)(v))->ob_type) == &PyDict_Type
)), 0) ? __assert_rtn(__func__, "Python/ceval.c", 2265, "PyDict_CheckExact(v)"
) : (void)0)
;
2266 err = PyDict_SetItem(v, w, u); /* v[w] = u */
2267 Py_DECREF(u)do { if (_Py_RefTotal-- , --((PyObject*)(u))->ob_refcnt !=
0) { if (((PyObject*)u)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2267, (PyObject *)(u)); } else _Py_Dealloc
((PyObject *)(u)); } while (0)
;
2268 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2268, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
2269 if (err == 0) {
2270 PREDICT(JUMP_ABSOLUTE)if (0) goto PRED_JUMP_ABSOLUTE;
2271 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2272 }
2273 break;
2274
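/* STORE_MAP and MAP_ADD both perform v[w] = u via PyDict_SetItem; they
   differ only in how the dict is found.  MAP_ADD (emitted for dict
   comprehensions) peeks oparg slots below the top of the stack, so the
   result dict stays in place while the loop pushes key/value pairs above
   it, and PREDICT(JUMP_ABSOLUTE) pairs with the loop's back-edge jump. */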
2275 TARGET(LOAD_ATTR)TARGET_LOAD_ATTR: opcode = 106; if (((106) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 106:
2276 w = GETITEM(names, oparg)PyTuple_GetItem((names), (oparg));
2277 v = TOP()(stack_pointer[-1]);
2278 x = PyObject_GetAttr(v, w);
2279 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2279, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
2280 SET_TOP(x)(stack_pointer[-1] = (x));
2281 if (x != NULL((void*)0)) DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2282 break;
2283
2284 TARGET(COMPARE_OP)TARGET_COMPARE_OP: opcode = 107; if (((107) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 107:
2285 w = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2286 v = TOP()(stack_pointer[-1]);
2287 x = cmp_outcome(oparg, v, w);
2288 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2288, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
2289 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2289, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
2290 SET_TOP(x)(stack_pointer[-1] = (x));
2291 if (x == NULL((void*)0)) break;
2292 PREDICT(POP_JUMP_IF_FALSE)if (0) goto PRED_POP_JUMP_IF_FALSE;
2293 PREDICT(POP_JUMP_IF_TRUE)if (0) goto PRED_POP_JUMP_IF_TRUE;
2294 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2295
2296 TARGET(IMPORT_NAME)TARGET_IMPORT_NAME: opcode = 108; if (((108) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 108:
2297 w = GETITEM(names, oparg)PyTuple_GetItem((names), (oparg));
2298 x = PyDict_GetItemString(f->f_builtins, "__import__");
2299 if (x == NULL((void*)0)) {
2300 PyErr_SetString(PyExc_ImportError,
2301 "__import__ not found");
2302 break;
2303 }
2304 Py_INCREF(x)( _Py_RefTotal++ , ((PyObject*)(x))->ob_refcnt++);
2305 v = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2306 u = TOP()(stack_pointer[-1]);
2307 if (PyLong_AsLong(u) != -1 || PyErr_Occurred())
2308 w = PyTuple_Pack(5,
2309 w,
2310 f->f_globals,
2311 f->f_locals == NULL((void*)0) ?
2312 Py_None(&_Py_NoneStruct) : f->f_locals,
2313 v,
2314 u);
2315 else
2316 w = PyTuple_Pack(4,
2317 w,
2318 f->f_globals,
2319 f->f_locals == NULL((void*)0) ?
2320 Py_None(&_Py_NoneStruct) : f->f_locals,
2321 v);
2322 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2322, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
2323 Py_DECREF(u)do { if (_Py_RefTotal-- , --((PyObject*)(u))->ob_refcnt !=
0) { if (((PyObject*)u)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2323, (PyObject *)(u)); } else _Py_Dealloc
((PyObject *)(u)); } while (0)
;
2324 if (w == NULL((void*)0)) {
2325 u = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2326 Py_DECREF(x)do { if (_Py_RefTotal-- , --((PyObject*)(x))->ob_refcnt !=
0) { if (((PyObject*)x)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2326, (PyObject *)(x)); } else _Py_Dealloc
((PyObject *)(x)); } while (0)
;
2327 x = NULL((void*)0);
2328 break;
2329 }
2330 READ_TIMESTAMP(intr0);
2331 v = x;
2332 x = PyEval_CallObject(v, w)PyEval_CallObjectWithKeywords(v, w, (PyObject *)((void*)0));
2333 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2333, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
2334 READ_TIMESTAMP(intr1);
2335 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2335, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
2336 SET_TOP(x)(stack_pointer[-1] = (x));
2337 if (x != NULL((void*)0)) DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2338 break;
2339
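/* IMPORT_NAME thus reduces to calling the __import__ found in
   f_builtins: with five arguments (name, globals, locals, fromlist,
   level) when the level on top of the stack is anything but -1, or with
   four arguments (level omitted) for the default of -1. */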
2340 TARGET(IMPORT_STAR)TARGET_IMPORT_STAR: opcode = 84; if (((84) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 84:
2341 v = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2342 PyFrame_FastToLocals(f);
2343 if ((x = f->f_locals) == NULL((void*)0)) {
2344 PyErr_SetString(PyExc_SystemError,
2345 "no locals found during 'import *'");
2346 break;
2347 }
2348 READ_TIMESTAMP(intr0);
2349 err = import_all_from(x, v);
2350 READ_TIMESTAMP(intr1);
2351 PyFrame_LocalsToFast(f, 0);
2352 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2352, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
2353 if (err == 0) DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2354 break;
2355
2356 TARGET(IMPORT_FROM)TARGET_IMPORT_FROM: opcode = 109; if (((109) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 109:
2357 w = GETITEM(names, oparg)PyTuple_GetItem((names), (oparg));
2358 v = TOP()(stack_pointer[-1]);
2359 READ_TIMESTAMP(intr0);
2360 x = import_from(v, w);
2361 READ_TIMESTAMP(intr1);
2362 PUSH(x){ (void)((*stack_pointer++ = (x)), lltrace && prtrace
((stack_pointer[-1]), "push")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2362, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2363 if (x != NULL((void*)0)) DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2364 break;
2365
2366 TARGET(JUMP_FORWARD)TARGET_JUMP_FORWARD: opcode = 110; if (((110) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 110:
2367 JUMPBY(oparg)(next_instr += (oparg));
2368 FAST_DISPATCH(){ if (!lltrace && !_Py_TracingPossible) { f->f_lasti
= ((int)(next_instr - first_instr)); goto *opcode_targets[*next_instr
++]; } goto fast_next_opcode; }
;
2369
2370 PREDICTED_WITH_ARG(POP_JUMP_IF_FALSE)PRED_POP_JUMP_IF_FALSE:;
2371 TARGET(POP_JUMP_IF_FALSE)TARGET_POP_JUMP_IF_FALSE: opcode = 114; if (((114) >= 90))
oparg = (next_instr += 2, (next_instr[-1]<<8) + next_instr
[-2]); case 114:
2372 w = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2373 if (w == Py_True((PyObject *) &_Py_TrueStruct)) {
2374 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2374, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
2375 FAST_DISPATCH(){ if (!lltrace && !_Py_TracingPossible) { f->f_lasti
= ((int)(next_instr - first_instr)); goto *opcode_targets[*next_instr
++]; } goto fast_next_opcode; }
;
2376 }
2377 if (w == Py_False((PyObject *) &_Py_FalseStruct)) {
2378 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2378, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
2379 JUMPTO(oparg)(next_instr = first_instr + (oparg));
2380 FAST_DISPATCH(){ if (!lltrace && !_Py_TracingPossible) { f->f_lasti
= ((int)(next_instr - first_instr)); goto *opcode_targets[*next_instr
++]; } goto fast_next_opcode; }
;
2381 }
2382 err = PyObject_IsTrue(w);
2383 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2383, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
2384 if (err > 0)
2385 err = 0;
2386 else if (err == 0)
2387 JUMPTO(oparg)(next_instr = first_instr + (oparg));
2388 else
2389 break;
2390 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2391
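/* The conditional-jump opcodes test for the Py_True/Py_False singletons
   by pointer identity before falling back to the general PyObject_IsTrue
   protocol, since comparison opcodes overwhelmingly push exactly those
   two objects.  The same fast-path pattern, standalone (hypothetical
   names, illustration only): */

#include <stdio.h>

static const int TRUE_SINGLETON;        /* unique addresses stand in  */
static const int FALSE_SINGLETON;       /* for the True/False objects */

static int is_true(const int *obj)
{
    if (obj == &TRUE_SINGLETON)  return 1;  /* pointer-compare fast path */
    if (obj == &FALSE_SINGLETON) return 0;
    return *obj != 0;                       /* general, slower protocol */
}

int main(void)
{
    int n = 42;
    printf("%d %d %d\n", is_true(&TRUE_SINGLETON),
                         is_true(&FALSE_SINGLETON), is_true(&n));
    return 0;
}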
2392 PREDICTED_WITH_ARG(POP_JUMP_IF_TRUE)PRED_POP_JUMP_IF_TRUE:;
2393 TARGET(POP_JUMP_IF_TRUE)TARGET_POP_JUMP_IF_TRUE: opcode = 115; if (((115) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 115:
2394 w = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2395 if (w == Py_False((PyObject *) &_Py_FalseStruct)) {
2396 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2396, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
2397 FAST_DISPATCH(){ if (!lltrace && !_Py_TracingPossible) { f->f_lasti
= ((int)(next_instr - first_instr)); goto *opcode_targets[*next_instr
++]; } goto fast_next_opcode; }
;
2398 }
2399 if (w == Py_True((PyObject *) &_Py_TrueStruct)) {
2400 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2400, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
2401 JUMPTO(oparg)(next_instr = first_instr + (oparg));
2402 FAST_DISPATCH(){ if (!lltrace && !_Py_TracingPossible) { f->f_lasti
= ((int)(next_instr - first_instr)); goto *opcode_targets[*next_instr
++]; } goto fast_next_opcode; }
;
2403 }
2404 err = PyObject_IsTrue(w);
2405 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2405, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
2406 if (err > 0) {
2407 err = 0;
2408 JUMPTO(oparg)(next_instr = first_instr + (oparg));
2409 }
2410 else if (err == 0)
2411 ;
2412 else
2413 break;
2414 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2415
2416 TARGET(JUMP_IF_FALSE_OR_POP)TARGET_JUMP_IF_FALSE_OR_POP: opcode = 111; if (((111) >= 90
)) oparg = (next_instr += 2, (next_instr[-1]<<8) + next_instr
[-2]); case 111:
2417 w = TOP()(stack_pointer[-1]);
2418 if (w == Py_True((PyObject *) &_Py_TrueStruct)) {
2419 STACKADJ(-1){ (void)((stack_pointer += -1), lltrace && prtrace((stack_pointer
[-1]), "stackadj")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2419, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2420 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2420, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
2421 FAST_DISPATCH(){ if (!lltrace && !_Py_TracingPossible) { f->f_lasti
= ((int)(next_instr - first_instr)); goto *opcode_targets[*next_instr
++]; } goto fast_next_opcode; }
;
2422 }
2423 if (w == Py_False((PyObject *) &_Py_FalseStruct)) {
2424 JUMPTO(oparg)(next_instr = first_instr + (oparg));
2425 FAST_DISPATCH(){ if (!lltrace && !_Py_TracingPossible) { f->f_lasti
= ((int)(next_instr - first_instr)); goto *opcode_targets[*next_instr
++]; } goto fast_next_opcode; }
;
2426 }
2427 err = PyObject_IsTrue(w);
2428 if (err > 0) {
2429 STACKADJ(-1){ (void)((stack_pointer += -1), lltrace && prtrace((stack_pointer
[-1]), "stackadj")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2429, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2430 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2430, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
2431 err = 0;
2432 }
2433 else if (err == 0)
2434 JUMPTO(oparg)(next_instr = first_instr + (oparg));
2435 else
2436 break;
2437 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2438
2439 TARGET(JUMP_IF_TRUE_OR_POP)TARGET_JUMP_IF_TRUE_OR_POP: opcode = 112; if (((112) >= 90
)) oparg = (next_instr += 2, (next_instr[-1]<<8) + next_instr
[-2]); case 112:
2440 w = TOP()(stack_pointer[-1]);
2441 if (w == Py_False((PyObject *) &_Py_FalseStruct)) {
2442 STACKADJ(-1){ (void)((stack_pointer += -1), lltrace && prtrace((stack_pointer
[-1]), "stackadj")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2442, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2443 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2443, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
2444 FAST_DISPATCH(){ if (!lltrace && !_Py_TracingPossible) { f->f_lasti
= ((int)(next_instr - first_instr)); goto *opcode_targets[*next_instr
++]; } goto fast_next_opcode; }
;
2445 }
2446 if (w == Py_True((PyObject *) &_Py_TrueStruct)) {
2447 JUMPTO(oparg)(next_instr = first_instr + (oparg));
2448 FAST_DISPATCH(){ if (!lltrace && !_Py_TracingPossible) { f->f_lasti
= ((int)(next_instr - first_instr)); goto *opcode_targets[*next_instr
++]; } goto fast_next_opcode; }
;
2449 }
2450 err = PyObject_IsTrue(w);
2451 if (err > 0) {
2452 err = 0;
2453 JUMPTO(oparg)(next_instr = first_instr + (oparg));
2454 }
2455 else if (err == 0) {
2456 STACKADJ(-1){ (void)((stack_pointer += -1), lltrace && prtrace((stack_pointer
[-1]), "stackadj")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2456, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2457 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2457, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
2458 }
2459 else
2460 break;
2461 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2462
2463 PREDICTED_WITH_ARG(JUMP_ABSOLUTE)PRED_JUMP_ABSOLUTE:;
2464 TARGET(JUMP_ABSOLUTE)TARGET_JUMP_ABSOLUTE: opcode = 113; if (((113) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 113:
2465 JUMPTO(oparg)(next_instr = first_instr + (oparg));
2466#if FAST_LOOPS
2467 /* Enabling this path speeds up all while and for loops by bypassing
2468 the per-loop checks for signals. By default, this should be turned off
2469 because it prevents detection of a control-break in tight loops like
2470 "while 1: pass". Compile with this option turned on when you need
2471 the speed-up and do not need break checking inside tight loops (ones
2472 that contain only instructions ending with FAST_DISPATCH).
2473 */
2474 FAST_DISPATCH(){ if (!lltrace && !_Py_TracingPossible) { f->f_lasti
= ((int)(next_instr - first_instr)); goto *opcode_targets[*next_instr
++]; } goto fast_next_opcode; }
;
2475#else
2476 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2477#endif
2478
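/* The trade-off described above, standalone: a toy computed-goto
   interpreter using the GCC/Clang labels-as-values extension, just like
   ceval.c's opcode_targets table.  DISPATCH() is modelled by the
   pending-work test before each jump; FAST_DISPATCH() would simply omit
   that test, which is why a back edge that only ever used it could never
   notice a pending interrupt (illustration only): */

#include <stdio.h>

static volatile int pending_demo;        /* stands in for eval_breaker */

static int run(const unsigned char *ip)
{
    static void *targets[] = { &&op_nop_checked, &&op_halt };
    int checks = 0;

    goto *targets[*ip++];

op_nop_checked:                          /* DISPATCH(): check, then jump */
    if (pending_demo)
        pending_demo = 0;                /* service signals etc. here */
    checks++;
    goto *targets[*ip++];

op_halt:
    return checks;
}

int main(void)
{
    unsigned char code[] = { 0, 0, 0, 1 };   /* nop nop nop halt */
    printf("%d\n", run(code));               /* prints 3 */
    return 0;
}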
2479 TARGET(GET_ITER)TARGET_GET_ITER: opcode = 68; if (((68) >= 90)) oparg = (next_instr
+= 2, (next_instr[-1]<<8) + next_instr[-2]); case 68:
2480 /* before: [obj]; after: [getiter(obj)] */
2481 v = TOP()(stack_pointer[-1]);
2482 x = PyObject_GetIter(v);
2483 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2483, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
2484 if (x != NULL((void*)0)) {
2485 SET_TOP(x)(stack_pointer[-1] = (x));
2486 PREDICT(FOR_ITER)if (0) goto PRED_FOR_ITER;
2487 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2488 }
2489 STACKADJ(-1){ (void)((stack_pointer += -1), lltrace && prtrace((stack_pointer
[-1]), "stackadj")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2489, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2490 break;
2491
2492 PREDICTED_WITH_ARG(FOR_ITER)PRED_FOR_ITER:;
2493 TARGET(FOR_ITER)TARGET_FOR_ITER: opcode = 93; if (((93) >= 90)) oparg = (next_instr
+= 2, (next_instr[-1]<<8) + next_instr[-2]); case 93:
2494 /* before: [iter]; after: [iter, iter()] *or* [] */
2495 v = TOP()(stack_pointer[-1]);
2496 x = (*v->ob_type->tp_iternext)(v);
2497 if (x != NULL((void*)0)) {
2498 PUSH(x){ (void)((*stack_pointer++ = (x)), lltrace && prtrace
((stack_pointer[-1]), "push")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2498, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2499 PREDICT(STORE_FAST)if (0) goto PRED_STORE_FAST;
2500 PREDICT(UNPACK_SEQUENCE)if (0) goto PRED_UNPACK_SEQUENCE;
2501 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2502 }
2503 if (PyErr_Occurred()) {
2504 if (!PyErr_ExceptionMatches(
2505 PyExc_StopIteration))
2506 break;
2507 PyErr_Clear();
2508 }
2509 /* iterator ended normally */
2510 x = v = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2511 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2511, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
2512 JUMPBY(oparg)(next_instr += (oparg));
2513 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2514
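/* FOR_ITER's contract in miniature: tp_iternext returns the next item,
   or NULL to signal exhaustion, with StopIteration either set (and then
   cleared above) or never raised at all.  The same NULL-means-done
   protocol, standalone (hypothetical types, illustration only): */

#include <stdio.h>

typedef struct { const int *p, *end; } range_iter;

static const int *iter_next(range_iter *it)
{
    return it->p < it->end ? it->p++ : NULL;    /* NULL == exhausted */
}

int main(void)
{
    int data[] = { 1, 2, 3 };
    range_iter it = { data, data + 3 };
    for (const int *v; (v = iter_next(&it)) != NULL; )
        printf("%d\n", *v);
    return 0;
}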
2515 TARGET(BREAK_LOOP)TARGET_BREAK_LOOP: opcode = 80; if (((80) >= 90)) oparg = (
next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]);
case 80:
2516 why = WHY_BREAK;
2517 goto fast_block_end;
2518
2519 TARGET(CONTINUE_LOOP)TARGET_CONTINUE_LOOP: opcode = 119; if (((119) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 119:
2520 retval = PyLong_FromLong(oparg);
2521 if (!retval) {
2522 x = NULL((void*)0);
2523 break;
2524 }
2525 why = WHY_CONTINUE;
2526 goto fast_block_end;
2527
2528 TARGET_WITH_IMPL(SETUP_LOOP, _setup_finally)TARGET_SETUP_LOOP: opcode = 120; if (((120) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 120: goto _setup_finally;
2529 TARGET_WITH_IMPL(SETUP_EXCEPT, _setup_finally)TARGET_SETUP_EXCEPT: opcode = 121; if (((121) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 121: goto _setup_finally;
2530 TARGET(SETUP_FINALLY)TARGET_SETUP_FINALLY: opcode = 122; if (((122) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 122:
2531 _setup_finally:
2532 /* NOTE: If you add any new block-setup opcodes that
2533 are not try/except/finally handlers, you may need
2534 to update the PyGen_NeedsFinalizing() function.
2535 */
2536
2537 PyFrame_BlockSetup(f, opcode, INSTR_OFFSET()((int)(next_instr - first_instr)) + oparg,
2538 STACK_LEVEL()((int)(stack_pointer - f->f_valuestack)));
2539 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2540
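/* PyFrame_BlockSetup records three things for every block: the opcode
   that created it, the handler offset to resume at when the block is
   left abnormally (current instruction offset plus oparg), and the
   value-stack depth to unwind to.  The fast_block_end loop near the
   bottom of this function consumes exactly that record. */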
2541 TARGET(SETUP_WITH)TARGET_SETUP_WITH: opcode = 143; if (((143) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 143:
2542 {
2543 static PyObject *exit, *enter;
2544 w = TOP()(stack_pointer[-1]);
2545 x = special_lookup(w, "__exit__", &exit);
2546 if (!x)
2547 break;
2548 SET_TOP(x)(stack_pointer[-1] = (x));
2549 u = special_lookup(w, "__enter__", &enter);
2550 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2550, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
2551 if (!u) {
2552 x = NULL((void*)0);
2553 break;
2554 }
2555 x = PyObject_CallFunctionObjArgs(u, NULL((void*)0));
2556 Py_DECREF(u)do { if (_Py_RefTotal-- , --((PyObject*)(u))->ob_refcnt !=
0) { if (((PyObject*)u)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2556, (PyObject *)(u)); } else _Py_Dealloc
((PyObject *)(u)); } while (0)
;
2557 if (!x)
2558 break;
2559 /* Set up the finally block before pushing the result
2560 of __enter__ on the stack. */
2561 PyFrame_BlockSetup(f, SETUP_FINALLY122, INSTR_OFFSET()((int)(next_instr - first_instr)) + oparg,
2562 STACK_LEVEL()((int)(stack_pointer - f->f_valuestack)));
2563
2564 PUSH(x){ (void)((*stack_pointer++ = (x)), lltrace && prtrace
((stack_pointer[-1]), "push")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2564, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2565 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2566 }
2567
2568 TARGET(WITH_CLEANUP)TARGET_WITH_CLEANUP: opcode = 81; if (((81) >= 90)) oparg =
(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]
); case 81:
2569 {
2570 /* At the top of the stack are 1-3 values indicating
2571 how/why we entered the finally clause:
2572 - TOP = None
2573 - (TOP, SECOND) = (WHY_{RETURN,CONTINUE}), retval
2574 - TOP = WHY_*; no retval below it
2575 - (TOP, SECOND, THIRD) = exc_info()
2576 (FOURTH, FIFTH, SIXTH) = previous exception for EXCEPT_HANDLER
2577 Below them is EXIT, the context.__exit__ bound method.
2578 In the last case, we must call
2579 EXIT(TOP, SECOND, THIRD)
2580 otherwise we must call
2581 EXIT(None, None, None)
2582
2583 In the first two cases, we remove EXIT from the
2584 stack, leaving the rest in the same order. In the
2585 third case, we shift the bottom 3 values of the
2586 stack down, and replace the empty spot with NULL.
2587
2588 In addition, if the stack represents an exception,
2589 *and* the function call returns a 'true' value, we
2590 push WHY_SILENCED onto the stack. END_FINALLY will
2591 then not re-raise the exception. (But non-local
2592 gotos should still be resumed.)
2593 */
2594
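/* The layouts above, concretely (stack top on the left):

       None, EXIT, ...                     ->  EXIT(None, None, None)
       WHY_RETURN, retval, EXIT, ...       ->  EXIT(None, None, None)
       WHY_BREAK, EXIT, ...                ->  EXIT(None, None, None)
       exc, val, tb, tp', val', tb', EXIT  ->  EXIT(exc, val, tb)

   In the exception case the previous-exception triple is shifted down
   over EXIT's slot and the freed FOURTH slot is set to NULL, which
   UNWIND_EXCEPT_HANDLER later pops off. */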
2595 PyObject *exit_func;
2596 u = TOP()(stack_pointer[-1]);
2597 if (u == Py_None(&_Py_NoneStruct)) {
2598 (void)POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2599 exit_func = TOP()(stack_pointer[-1]);
2600 SET_TOP(u)(stack_pointer[-1] = (u));
2601 v = w = Py_None(&_Py_NoneStruct);
2602 }
2603 else if (PyLong_Check(u)((((((PyObject*)(u))->ob_type))->tp_flags & ((1L<<
24))) != 0)
) {
2604 (void)POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2605 switch(PyLong_AsLong(u)) {
2606 case WHY_RETURN:
2607 case WHY_CONTINUE:
2608 /* Retval in TOP. */
2609 exit_func = SECOND()(stack_pointer[-2]);
2610 SET_SECOND(TOP())(stack_pointer[-2] = ((stack_pointer[-1])));
2611 SET_TOP(u)(stack_pointer[-1] = (u));
2612 break;
2613 default:
2614 exit_func = TOP()(stack_pointer[-1]);
2615 SET_TOP(u)(stack_pointer[-1] = (u));
2616 break;
2617 }
2618 u = v = w = Py_None(&_Py_NoneStruct);
2619 }
2620 else {
2621 PyObject *tp, *exc, *tb;
2622 PyTryBlock *block;
2623 v = SECOND()(stack_pointer[-2]);
2624 w = THIRD()(stack_pointer[-3]);
2625 tp = FOURTH()(stack_pointer[-4]);
2626 exc = PEEK(5)(stack_pointer[-(5)]);
2627 tb = PEEK(6)(stack_pointer[-(6)]);
2628 exit_func = PEEK(7)(stack_pointer[-(7)]);
2629 SET_VALUE(7, tb)(stack_pointer[-(7)] = (tb));
2630 SET_VALUE(6, exc)(stack_pointer[-(6)] = (exc));
2631 SET_VALUE(5, tp)(stack_pointer[-(5)] = (tp));
2632 /* UNWIND_EXCEPT_HANDLER will pop this off. */
2633 SET_FOURTH(NULL)(stack_pointer[-4] = (((void*)0)));
2634 /* We just shifted the stack down, so we have
2635 to tell the except handler block that the
2636 values are lower than it expects. */
2637 block = &f->f_blockstack[f->f_iblock - 1];
2638 assert(block->b_type == EXCEPT_HANDLER)(__builtin_expect(!(block->b_type == 257), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2638, "block->b_type == EXCEPT_HANDLER"
) : (void)0)
;
2639 block->b_level--;
2640 }
2641 /* XXX Not the fastest way to call it... */
2642 x = PyObject_CallFunctionObjArgs(exit_func, u, v, w,
2643 NULL((void*)0));
2644 Py_DECREF(exit_func)do { if (_Py_RefTotal-- , --((PyObject*)(exit_func))->ob_refcnt
!= 0) { if (((PyObject*)exit_func)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2644, (PyObject *)(exit_func)); } else _Py_Dealloc
((PyObject *)(exit_func)); } while (0)
;
2645 if (x == NULL((void*)0))
2646 break; /* Go to error exit */
2647
2648 if (u != Py_None(&_Py_NoneStruct))
2649 err = PyObject_IsTrue(x);
2650 else
2651 err = 0;
2652 Py_DECREF(x)do { if (_Py_RefTotal-- , --((PyObject*)(x))->ob_refcnt !=
0) { if (((PyObject*)x)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2652, (PyObject *)(x)); } else _Py_Dealloc
((PyObject *)(x)); } while (0)
;
2653
2654 if (err < 0)
2655 break; /* Go to error exit */
2656 else if (err > 0) {
2657 err = 0;
2658 /* There was an exception and a True return */
2659 PUSH(PyLong_FromLong((long) WHY_SILENCED)){ (void)((*stack_pointer++ = (PyLong_FromLong((long) WHY_SILENCED
))), lltrace && prtrace((stack_pointer[-1]), "push"))
; (__builtin_expect(!(((int)(stack_pointer - f->f_valuestack
)) <= co->co_stacksize), 0) ? __assert_rtn(__func__, "Python/ceval.c"
, 2659, "STACK_LEVEL() <= co->co_stacksize") : (void)0)
; }
;
2660 }
2661 PREDICT(END_FINALLY)if (0) goto PRED_END_FINALLY;
2662 break;
2663 }
2664
2665 TARGET(CALL_FUNCTION)TARGET_CALL_FUNCTION: opcode = 131; if (((131) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 131:
2666 {
2667 PyObject **sp;
2668 PCALL(PCALL_ALL);
2669 sp = stack_pointer;
2670#ifdef WITH_TSC
2671 x = call_function(&sp, oparg, &intr0, &intr1);
2672#else
2673 x = call_function(&sp, oparg);
2674#endif
2675 stack_pointer = sp;
2676 PUSH(x){ (void)((*stack_pointer++ = (x)), lltrace && prtrace
((stack_pointer[-1]), "push")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2676, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2677 if (x != NULL((void*)0))
2678 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2679 break;
2680 }
2681
2682 TARGET_WITH_IMPL(CALL_FUNCTION_VAR, _call_function_var_kw)TARGET_CALL_FUNCTION_VAR: opcode = 140; if (((140) >= 90))
oparg = (next_instr += 2, (next_instr[-1]<<8) + next_instr
[-2]); case 140: goto _call_function_var_kw;
2683 TARGET_WITH_IMPL(CALL_FUNCTION_KW, _call_function_var_kw)TARGET_CALL_FUNCTION_KW: opcode = 141; if (((141) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 141: goto _call_function_var_kw;
2684 TARGET(CALL_FUNCTION_VAR_KW)TARGET_CALL_FUNCTION_VAR_KW: opcode = 142; if (((142) >= 90
)) oparg = (next_instr += 2, (next_instr[-1]<<8) + next_instr
[-2]); case 142:
2685 _call_function_var_kw:
2686 {
2687 int na = oparg & 0xff;
2688 int nk = (oparg>>8) & 0xff;
2689 int flags = (opcode - CALL_FUNCTION131) & 3;
2690 int n = na + 2 * nk;
2691 PyObject **pfunc, *func, **sp;
2692 PCALL(PCALL_ALL);
2693 if (flags & CALL_FLAG_VAR1)
2694 n++;
2695 if (flags & CALL_FLAG_KW2)
2696 n++;
2697 pfunc = stack_pointer - n - 1;
2698 func = *pfunc;
2699
2700 if (PyMethod_Check(func)((func)->ob_type == &PyMethod_Type)
2701 && PyMethod_GET_SELF(func)(((PyMethodObject *)func) -> im_self) != NULL((void*)0)) {
2702 PyObject *self = PyMethod_GET_SELF(func)(((PyMethodObject *)func) -> im_self);
2703 Py_INCREF(self)( _Py_RefTotal++ , ((PyObject*)(self))->ob_refcnt++);
2704 func = PyMethod_GET_FUNCTION(func)(((PyMethodObject *)func) -> im_func);
2705 Py_INCREF(func)( _Py_RefTotal++ , ((PyObject*)(func))->ob_refcnt++);
2706 Py_DECREF(*pfunc)do { if (_Py_RefTotal-- , --((PyObject*)(*pfunc))->ob_refcnt
!= 0) { if (((PyObject*)*pfunc)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2706, (PyObject *)(*pfunc)); } else _Py_Dealloc
((PyObject *)(*pfunc)); } while (0)
;
2707 *pfunc = self;
2708 na++;
2709 /* n++; */
2710 } else
2711 Py_INCREF(func)( _Py_RefTotal++ , ((PyObject*)(func))->ob_refcnt++);
2712 sp = stack_pointer;
2713 READ_TIMESTAMP(intr0);
2714 x = ext_do_call(func, &sp, flags, na, nk);
2715 READ_TIMESTAMP(intr1);
2716 stack_pointer = sp;
2717 Py_DECREF(func)do { if (_Py_RefTotal-- , --((PyObject*)(func))->ob_refcnt
!= 0) { if (((PyObject*)func)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2717, (PyObject *)(func)); } else _Py_Dealloc
((PyObject *)(func)); } while (0)
;
2718
2719 while (stack_pointer > pfunc) {
2720 w = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2721 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2721, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
2722 }
2723 PUSH(x){ (void)((*stack_pointer++ = (x)), lltrace && prtrace
((stack_pointer[-1]), "push")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2723, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2724 if (x != NULL((void*)0))
2725 DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2726 break;
2727 }
2728
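/* The call opargs above pack two counts into one operand: positional
   argument count in the low byte, keyword-pair count in the next byte,
   with CALL_FLAG_VAR / CALL_FLAG_KW derived from the opcode number
   itself; a bound method is additionally unpacked into (function, self)
   in place, so self becomes an extra positional argument.  Decoding the
   operand, standalone (illustration only): */

#include <assert.h>

int main(void)
{
    int oparg = (2 << 8) | 3;      /* 3 positional args, 2 keyword pairs */
    int na = oparg & 0xff;
    int nk = (oparg >> 8) & 0xff;
    int n  = na + 2 * nk;          /* stack slots above the callable */
    assert(na == 3 && nk == 2 && n == 7);
    return 0;
}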
2729 TARGET_WITH_IMPL(MAKE_CLOSURE, _make_function)TARGET_MAKE_CLOSURE: opcode = 134; if (((134) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 134: goto _make_function;
2730 TARGET(MAKE_FUNCTION)TARGET_MAKE_FUNCTION: opcode = 132; if (((132) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 132:
2731 _make_function:
2732 {
2733 int posdefaults = oparg & 0xff;
2734 int kwdefaults = (oparg>>8) & 0xff;
2735 int num_annotations = (oparg >> 16) & 0x7fff;
2736
2737 v = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
; /* code object */
2738 x = PyFunction_New(v, f->f_globals);
2739 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2739, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
2740
2741 if (x != NULL((void*)0) && opcode == MAKE_CLOSURE134) {
2742 v = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2743 if (PyFunction_SetClosure(x, v) != 0) {
2744 /* Can't happen unless bytecode is corrupt. */
2745 why = WHY_EXCEPTION;
2746 }
2747 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2747, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
2748 }
2749
2750 if (x != NULL((void*)0) && num_annotations > 0) {
2751 Py_ssize_t name_ix;
2752 u = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
; /* names of args with annotations */
2753 v = PyDict_New();
2754 if (v == NULL((void*)0)) {
2755 Py_DECREF(x)do { if (_Py_RefTotal-- , --((PyObject*)(x))->ob_refcnt !=
0) { if (((PyObject*)x)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2755, (PyObject *)(x)); } else _Py_Dealloc
((PyObject *)(x)); } while (0)
;
2756 x = NULL((void*)0);
2757 break;
2758 }
2759 name_ix = PyTuple_Size(u);
2760 assert(num_annotations == name_ix+1)(__builtin_expect(!(num_annotations == name_ix+1), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2760, "num_annotations == name_ix+1"
) : (void)0)
;
2761 while (name_ix > 0) {
2762 --name_ix;
2763 t = PyTuple_GET_ITEM(u, name_ix)(((PyTupleObject *)(u))->ob_item[name_ix]);
2764 w = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2765 /* XXX(nnorwitz): check for errors */
2766 PyDict_SetItem(v, t, w);
2767 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2767, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
2768 }
2769
2770 if (PyFunction_SetAnnotations(x, v) != 0) {
2771 /* Can't happen unless
2772 PyFunction_SetAnnotations changes. */
2773 why = WHY_EXCEPTION;
2774 }
2775 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2775, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
2776 Py_DECREF(u)do { if (_Py_RefTotal-- , --((PyObject*)(u))->ob_refcnt !=
0) { if (((PyObject*)u)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2776, (PyObject *)(u)); } else _Py_Dealloc
((PyObject *)(u)); } while (0)
;
2777 }
2778
2779 /* XXX Maybe this should be a separate opcode? */
2780 if (x != NULL((void*)0) && posdefaults > 0) {
2781 v = PyTuple_New(posdefaults);
2782 if (v == NULL((void*)0)) {
2783 Py_DECREF(x)do { if (_Py_RefTotal-- , --((PyObject*)(x))->ob_refcnt !=
0) { if (((PyObject*)x)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2783, (PyObject *)(x)); } else _Py_Dealloc
((PyObject *)(x)); } while (0)
;
2784 x = NULL((void*)0);
2785 break;
2786 }
2787 while (--posdefaults >= 0) {
2788 w = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2789 PyTuple_SET_ITEM(v, posdefaults, w)(((PyTupleObject *)(v))->ob_item[posdefaults] = w);
2790 }
2791 if (PyFunction_SetDefaults(x, v) != 0) {
2792 /* Can't happen unless
2793 PyFunction_SetDefaults changes. */
2794 why = WHY_EXCEPTION;
2795 }
2796 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2796, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
2797 }
2798 if (x != NULL((void*)0) && kwdefaults > 0) {
2799 v = PyDict_New();
2800 if (v == NULL((void*)0)) {
2801 Py_DECREF(x)do { if (_Py_RefTotal-- , --((PyObject*)(x))->ob_refcnt !=
0) { if (((PyObject*)x)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2801, (PyObject *)(x)); } else _Py_Dealloc
((PyObject *)(x)); } while (0)
;
2802 x = NULL((void*)0);
2803 break;
2804 }
2805 while (--kwdefaults >= 0) {
2806 w = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
; /* default value */
2807 u = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
; /* kw only arg name */
2808 /* XXX(nnorwitz): check for errors */
2809 PyDict_SetItem(v, u, w);
2810 Py_DECREF(w)do { if (_Py_RefTotal-- , --((PyObject*)(w))->ob_refcnt !=
0) { if (((PyObject*)w)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2810, (PyObject *)(w)); } else _Py_Dealloc
((PyObject *)(w)); } while (0)
;
2811 Py_DECREF(u)do { if (_Py_RefTotal-- , --((PyObject*)(u))->ob_refcnt !=
0) { if (((PyObject*)u)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2811, (PyObject *)(u)); } else _Py_Dealloc
((PyObject *)(u)); } while (0)
;
2812 }
2813 if (PyFunction_SetKwDefaults(x, v) != 0) {
2814 /* Can't happen unless
2815 PyFunction_SetKwDefaults changes. */
2816 why = WHY_EXCEPTION;
2817 }
2818 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2818, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
2819 }
2820 PUSH(x){ (void)((*stack_pointer++ = (x)), lltrace && prtrace
((stack_pointer[-1]), "push")); (__builtin_expect(!(((int)(stack_pointer
- f->f_valuestack)) <= co->co_stacksize), 0) ? __assert_rtn
(__func__, "Python/ceval.c", 2820, "STACK_LEVEL() <= co->co_stacksize"
) : (void)0); }
;
2821 break;
2822 }
2823
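/* MAKE_FUNCTION's operand packs three counts: positional defaults in
   the low byte, keyword-only defaults in the second byte, and the
   annotation count in the upper 15 bits (the tuple of annotated names
   itself accounts for the +1 checked against PyTuple_Size above).
   Standalone decoding check (illustration only): */

#include <assert.h>

int main(void)
{
    int oparg = (4 << 16) | (1 << 8) | 2;
    assert((oparg & 0xff) == 2);            /* posdefaults      */
    assert(((oparg >> 8) & 0xff) == 1);     /* kwdefaults       */
    assert(((oparg >> 16) & 0x7fff) == 4);  /* num_annotations  */
    return 0;
}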
2824 TARGET(BUILD_SLICE)TARGET_BUILD_SLICE: opcode = 133; if (((133) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 133:
2825 if (oparg == 3)
2826 w = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2827 else
2828 w = NULL((void*)0);
2829 v = POP()((void)(lltrace && prtrace((stack_pointer[-1]), "pop"
)), (*--stack_pointer))
;
2830 u = TOP()(stack_pointer[-1]);
2831 x = PySlice_New(u, v, w);
2832 Py_DECREF(u)do { if (_Py_RefTotal-- , --((PyObject*)(u))->ob_refcnt !=
0) { if (((PyObject*)u)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2832, (PyObject *)(u)); } else _Py_Dealloc
((PyObject *)(u)); } while (0)
;
2833 Py_DECREF(v)do { if (_Py_RefTotal-- , --((PyObject*)(v))->ob_refcnt !=
0) { if (((PyObject*)v)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 2833, (PyObject *)(v)); } else _Py_Dealloc
((PyObject *)(v)); } while (0)
;
2834 Py_XDECREF(w)do { if ((w) == ((void*)0)) ; else do { if (_Py_RefTotal-- , --
((PyObject*)(w))->ob_refcnt != 0) { if (((PyObject*)w)->
ob_refcnt < 0) _Py_NegativeRefcount("Python/ceval.c", 2834
, (PyObject *)(w)); } else _Py_Dealloc((PyObject *)(w)); } while
(0); } while (0)
;
2835 SET_TOP(x)(stack_pointer[-1] = (x));
2836 if (x != NULL((void*)0)) DISPATCH(){ if (!__extension__ ({ __typeof__(&eval_breaker) atomic_val
= &eval_breaker; __typeof__(atomic_val->_value) result
; volatile __typeof__(result) *volatile_data = &atomic_val
->_value; _Py_memory_order order = _Py_memory_order_relaxed
; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); ; switch(order
) { case _Py_memory_order_release: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release
); break; default: break; } result = *volatile_data; switch(order
) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel
: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire
); break; default: break; } ; result; })) { { if (!lltrace &&
!_Py_TracingPossible) { f->f_lasti = ((int)(next_instr - first_instr
)); goto *opcode_targets[*next_instr++]; } goto fast_next_opcode
; }; } continue; }
;
2837 break;
2838
2839 TARGET(EXTENDED_ARG)TARGET_EXTENDED_ARG: opcode = 144; if (((144) >= 90)) oparg
= (next_instr += 2, (next_instr[-1]<<8) + next_instr[-
2]); case 144:
2840 opcode = NEXTOP()(*next_instr++);
2841 oparg = oparg<<16 | NEXTARG()(next_instr += 2, (next_instr[-1]<<8) + next_instr[-2]);
2842 goto dispatch_opcode;
2843
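/* EXTENDED_ARG splices extra high bits onto the next instruction's
   16-bit operand, so opargs above 0xffff stay expressible.  The
   arithmetic, standalone (illustration only): */

#include <assert.h>

int main(void)
{
    int ext = 0x0001;               /* operand of EXTENDED_ARG         */
    int arg = 0x2345;               /* operand of the following opcode */
    assert((ext << 16 | arg) == 0x12345);
    return 0;
}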
2844#if USE_COMPUTED_GOTOS1
2845 _unknown_opcode:
2846#endif
2847 default:
2848 fprintf(stderr__stderrp,
2849 "XXX lineno: %d, opcode: %d\n",
2850 PyFrame_GetLineNumber(f),
2851 opcode);
2852 PyErr_SetString(PyExc_SystemError, "unknown opcode");
2853 why = WHY_EXCEPTION;
2854 break;
2855
2856#ifdef CASE_TOO_BIG
2857 }
2858#endif
2859
2860 } /* switch */
2861
2862 on_error:
2863
2864 READ_TIMESTAMP(inst1);
2865
2866 /* Quickly continue if no error occurred */
2867
2868 if (why == WHY_NOT) {
2869 if (err == 0 && x != NULL) {
2870#ifdef CHECKEXC
2871 /* This check is expensive! */
2872 if (PyErr_Occurred())
2873 fprintf(stderr,
2874 "XXX undetected error\n");
2875 else {
2876#endif
2877 READ_TIMESTAMP(loop1);
2878 continue; /* Normal, fast path */
2879#ifdef CHECKEXC
2880 }
2881#endif
2882 }
2883 why = WHY_EXCEPTION;
2884 x = Py_None;
2885 err = 0;
2886 }
2887
2888 /* Double-check exception status */
2889
2890 if (why == WHY_EXCEPTION || why == WHY_RERAISE) {
2891 if (!PyErr_Occurred()) {
2892 PyErr_SetString(PyExc_SystemError,
2893 "error return without exception set");
2894 why = WHY_EXCEPTION;
2895 }
2896 }
2897#ifdef CHECKEXC
2898 else {
2899 /* This check is expensive! */
2900 if (PyErr_Occurred()) {
2901 char buf[128];
2902 sprintf(buf, "Stack unwind with exception "
2903 "set and why=%d", why);
2904 Py_FatalError(buf);
2905 }
2906 }
2907#endif
2908
2909 /* Log traceback info if this is a real exception */
2910
2911 if (why == WHY_EXCEPTION) {
2912 PyTraceBack_Here(f);
2913
2914 if (tstate->c_tracefunc != NULL)
2915 call_exc_trace(tstate->c_tracefunc,
2916 tstate->c_traceobj, f);
2917 }
2918
2919 /* For the rest, treat WHY_RERAISE as WHY_EXCEPTION */
2920
2921 if (why == WHY_RERAISE)
2922 why = WHY_EXCEPTION;
2923
2924 /* Unwind stacks if a (pseudo) exception occurred */
2925
2926fast_block_end:
2927 while (why != WHY_NOT && f->f_iblock > 0) {
2928 /* Peek at the current block. */
2929 PyTryBlock *b = &f->f_blockstack[f->f_iblock - 1];
2930
2931 assert(why != WHY_YIELD);
2932 if (b->b_type == SETUP_LOOP && why == WHY_CONTINUE) {
2933 why = WHY_NOT;
2934 JUMPTO(PyLong_AS_LONG(retval));
2935 Py_DECREF(retval);
2936 break;
2937 }
2938 /* Now we have to pop the block. */
2939 f->f_iblock--;
2940
2941 if (b->b_type == EXCEPT_HANDLER) {
2942 UNWIND_EXCEPT_HANDLER(b);
2943 continue;
2944 }
2945 UNWIND_BLOCK(b);
2946 if (b->b_type == SETUP_LOOP && why == WHY_BREAK) {
2947 why = WHY_NOT;
2948 JUMPTO(b->b_handler);
2949 break;
2950 }
2951 if (why == WHY_EXCEPTION && (b->b_type == SETUP_EXCEPT
2952 || b->b_type == SETUP_FINALLY)) {
2953 PyObject *exc, *val, *tb;
2954 int handler = b->b_handler;
2955 /* Beware, this invalidates all b->b_* fields */
2956 PyFrame_BlockSetup(f, EXCEPT_HANDLER, -1, STACK_LEVEL());
2957 PUSH(tstate->exc_traceback);
2958 PUSH(tstate->exc_value);
2959 if (tstate->exc_type != NULL) {
2960 PUSH(tstate->exc_type);
2961 }
2962 else {
2963 Py_INCREF(Py_None);
2964 PUSH(Py_None);
2965 }
2966 PyErr_Fetch(&exc, &val, &tb);
2967 /* Make the raw exception data
2968 available to the handler,
2969 so a program can emulate the
2970 Python main loop. */
2971 PyErr_NormalizeException(
2972 &exc, &val, &tb);
2973 PyException_SetTraceback(val, tb);
2974 Py_INCREF(exc);
2975 tstate->exc_type = exc;
2976 Py_INCREF(val);
2977 tstate->exc_value = val;
2978 tstate->exc_traceback = tb;
2979 if (tb == NULL)
2980 tb = Py_None;
2981 Py_INCREF(tb);
2982 PUSH(tb);
2983 PUSH(val);
2984 PUSH(exc);
2985 why = WHY_NOT;
2986 JUMPTO(handler);
2987 break;
2988 }
2989 if (b->b_type == SETUP_FINALLY) {
2990 if (why & (WHY_RETURN | WHY_CONTINUE))
2991 PUSH(retval);
2992 PUSH(PyLong_FromLong((long)why));
2993 why = WHY_NOT;
2994 JUMPTO(b->b_handler);
2995 break;
2996 }
2997 } /* unwind stack */
2998
2999 /* End the loop if we still have an error (or return) */
3000
3001 if (why != WHY_NOT)
3002 break;
3003 READ_TIMESTAMP(loop1);
3004
3005 } /* main loop */
3006
3007 assert(why != WHY_YIELD);
3008 /* Pop remaining stack entries. */
3009 while (!EMPTY()) {
3010 v = POP();
3011 Py_XDECREF(v);
3012 }
3013
3014 if (why != WHY_RETURN)
3015 retval = NULL;
3016
3017fast_yield:
3018 if (tstate->use_tracing) {
3019 if (tstate->c_tracefunc) {
3020 if (why == WHY_RETURN || why == WHY_YIELD) {
3021 if (call_trace(tstate->c_tracefunc,
3022 tstate->c_traceobj, f,
3023 PyTrace_RETURN, retval)) {
3024 Py_XDECREF(retval);
3025 retval = NULL;
3026 why = WHY_EXCEPTION;
3027 }
3028 }
3029 else if (why == WHY_EXCEPTION) {
3030 call_trace_protected(tstate->c_tracefunc,
3031 tstate->c_traceobj, f,
3032 PyTrace_RETURN, NULL);
3033 }
3034 }
3035 if (tstate->c_profilefunc) {
3036 if (why == WHY_EXCEPTION)
3037 call_trace_protected(tstate->c_profilefunc,
3038 tstate->c_profileobj, f,
3039 PyTrace_RETURN, NULL);
3040 else if (call_trace(tstate->c_profilefunc,
3041 tstate->c_profileobj, f,
3042 PyTrace_RETURN, retval)) {
3043 Py_XDECREF(retval);
3044 retval = NULL;
3045 why = WHY_EXCEPTION;
Value stored to 'why' is never read
3046 }
3047 }
3048 }
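
This is the dead store behind the report: by the time the profiling epilogue above runs, the main loop has already exited, and control falls through to exit_eval_frame, which reads only retval, so the value written to 'why' at line 3045 is never observed. A minimal self-contained sketch of the same pattern (names are illustrative, not taken from ceval.c):

    #include <Python.h>

    /* Sketch of the pattern flagged at line 3045: the final write to 'why'
       has no subsequent reader before the function returns. */
    static PyObject *
    epilogue_sketch(int profile_failed, PyObject *retval)
    {
        int why = 0;                    /* stands in for enum why_code */
        if (profile_failed) {
            Py_XDECREF(retval);
            retval = NULL;
            why = 2;                    /* dead store: nothing reads 'why' */
        }
        return retval;                  /* only 'retval' escapes */
    }
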
3049
3050 /* pop frame */
3051exit_eval_frame:
3052 Py_LeaveRecursiveCall();
3053 tstate->frame = f->f_back;
3054
3055 return retval;
3056}
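
When the unwind loop above reaches a SETUP_FINALLY block with a pending return or continue, it parks retval on the value stack and boxes the why code on top of it (PUSH(PyLong_FromLong((long)why)) at line 2992); END_FINALLY later unboxes that code to decide how to resume the unwind. A hedged standalone sketch of the boxing step, simulating the stack with a plain array and assuming 0x0008 for the WHY_RETURN bit of this version's enum why_code:

    #include <Python.h>

    int
    main(void)
    {
        PyObject *stack[2];
        long why, ret;

        Py_Initialize();
        stack[0] = PyLong_FromLong(123);     /* stands in for retval */
        stack[1] = PyLong_FromLong(0x0008);  /* stands in for WHY_RETURN */
        /* What END_FINALLY would do: pop the code, see a pending return. */
        why = PyLong_AsLong(stack[1]);
        ret = PyLong_AsLong(stack[0]);
        printf("why=0x%04lx retval=%ld\n", why, ret);
        Py_DECREF(stack[0]);
        Py_DECREF(stack[1]);
        Py_Finalize();
        return 0;
    }
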
3057
3058/* This is gonna seem *real weird*, but if you put some other code between
3059 PyEval_EvalFrame() and PyEval_EvalCodeEx() you will need to adjust
3060 the test in the if statements in Misc/gdbinit (pystack and pystackv). */
3061
3062PyObject *
3063PyEval_EvalCodeEx(PyObject *_co, PyObject *globals, PyObject *locals,
3064 PyObject **args, int argcount, PyObject **kws, int kwcount,
3065 PyObject **defs, int defcount, PyObject *kwdefs, PyObject *closure)
3066{
3067 PyCodeObject* co = (PyCodeObject*)_co;
3068 register PyFrameObject *f;
3069 register PyObject *retval = NULL;
3070 register PyObject **fastlocals, **freevars;
3071 PyThreadState *tstate = PyThreadState_GET();
3072 PyObject *x, *u;
3073 int total_args = co->co_argcount + co->co_kwonlyargcount;
3074
3075 if (globals == NULL) {
3076 PyErr_SetString(PyExc_SystemError,
3077 "PyEval_EvalCodeEx: NULL globals");
3078 return NULL;
3079 }
3080
3081 assert(tstate != NULL);
3082 assert(globals != NULL);
3083 f = PyFrame_New(tstate, co, globals, locals);
3084 if (f == NULL)
3085 return NULL;
3086
3087 fastlocals = f->f_localsplus;
3088 freevars = f->f_localsplus + co->co_nlocals;
3089
3090 if (total_args || co->co_flags & (CO_VARARGS | CO_VARKEYWORDS)) {
3091 int i;
3092 int n = argcount;
3093 PyObject *kwdict = NULL;
3094 if (co->co_flags & CO_VARKEYWORDS) {
3095 kwdict = PyDict_New();
3096 if (kwdict == NULL)
3097 goto fail;
3098 i = total_args;
3099 if (co->co_flags & CO_VARARGS)
3100 i++;
3101 SETLOCAL(i, kwdict);
3102 }
3103 if (argcount > co->co_argcount) {
3104 if (!(co->co_flags & CO_VARARGS)) {
3105 PyErr_Format(PyExc_TypeError,
3106 "%U() takes %s %d "
3107 "positional argument%s (%d given)",
3108 co->co_name,
3109 defcount ? "at most" : "exactly",
3110 co->co_argcount,
3111 co->co_argcount == 1 ? "" : "s",
3112 argcount + kwcount);
3113 goto fail;
3114 }
3115 n = co->co_argcount;
3116 }
3117 for (i = 0; i < n; i++) {
3118 x = args[i];
3119 Py_INCREF(x);
3120 SETLOCAL(i, x);
3121 }
3122 if (co->co_flags & CO_VARARGS) {
3123 u = PyTuple_New(argcount - n);
3124 if (u == NULL)
3125 goto fail;
3126 SETLOCAL(total_args, u);
3127 for (i = n; i < argcount; i++) {
3128 x = args[i];
3129 Py_INCREF(x);
3130 PyTuple_SET_ITEM(u, i-n, x);
3131 }
3132 }
3133 for (i = 0; i < kwcount; i++) {
3134 PyObject **co_varnames;
3135 PyObject *keyword = kws[2*i];
3136 PyObject *value = kws[2*i + 1];
3137 int j;
3138 if (keyword == NULL || !PyUnicode_Check(keyword)) {
3139 PyErr_Format(PyExc_TypeError,
3140 "%U() keywords must be strings",
3141 co->co_name);
3142 goto fail;
3143 }
3144 /* Speed hack: do raw pointer compares. As names are
3145 normally interned this should almost always hit. */
3146 co_varnames = ((PyTupleObject *)(co->co_varnames))->ob_item;
3147 for (j = 0; j < total_args; j++) {
3148 PyObject *nm = co_varnames[j];
3149 if (nm == keyword)
3150 goto kw_found;
3151 }
3152 /* Slow fallback, just in case */
3153 for (j = 0; j < total_args; j++) {
3154 PyObject *nm = co_varnames[j];
3155 int cmp = PyObject_RichCompareBool(
3156 keyword, nm, Py_EQ);
3157 if (cmp > 0)
3158 goto kw_found;
3159 else if (cmp < 0)
3160 goto fail;
3161 }
3162 if (j >= total_args && kwdict == NULL) {
3163 PyErr_Format(PyExc_TypeError,
3164 "%U() got an unexpected "
3165 "keyword argument '%S'",
3166 co->co_name,
3167 keyword);
3168 goto fail;
3169 }
3170 PyDict_SetItem(kwdict, keyword, value);
3171 continue;
3172 kw_found:
3173 if (GETLOCAL(j) != NULL) {
3174 PyErr_Format(PyExc_TypeError,
3175 "%U() got multiple "
3176 "values for keyword "
3177 "argument '%S'",
3178 co->co_name,
3179 keyword);
3180 goto fail;
3181 }
3182 Py_INCREF(value);
3183 SETLOCAL(j, value);
3184 }
3185 if (co->co_kwonlyargcount > 0) {
3186 for (i = co->co_argcount; i < total_args; i++) {
3187 PyObject *name;
3188 if (GETLOCAL(i) != NULL)
3189 continue;
3190 name = PyTuple_GET_ITEM(co->co_varnames, i);
3191 if (kwdefs != NULL) {
3192 PyObject *def = PyDict_GetItem(kwdefs, name);
3193 if (def) {
3194 Py_INCREF(def);
3195 SETLOCAL(i, def);
3196 continue;
3197 }
3198 }
3199 PyErr_Format(PyExc_TypeError,
3200 "%U() needs keyword-only argument %S",
3201 co->co_name, name);
3202 goto fail;
3203 }
3204 }
3205 if (argcount < co->co_argcount) {
3206 int m = co->co_argcount - defcount;
3207 for (i = argcount; i < m; i++) {
3208 if (GETLOCAL(i) == NULL) {
3209 int j, given = 0;
3210 for (j = 0; j < co->co_argcount; j++)
3211 if (GETLOCAL(j))
3212 given++;
3213 PyErr_Format(PyExc_TypeError,
3214 "%U() takes %s %d "
3215 "argument%s "
3216 "(%d given)",
3217 co->co_name,
3218 ((co->co_flags & CO_VARARGS) ||
3219 defcount) ? "at least"
3220 : "exactly",
3221 m, m == 1 ? "" : "s", given);
3222 goto fail;
3223 }
3224 }
3225 if (n > m)
3226 i = n - m;
3227 else
3228 i = 0;
3229 for (; i < defcount; i++) {
3230 if (GETLOCAL(m+i) == NULL) {
3231 PyObject *def = defs[i];
3232 Py_INCREF(def);
3233 SETLOCAL(m+i, def);
3234 }
3235 }
3236 }
3237 }
3238 else if (argcount > 0 || kwcount > 0) {
3239 PyErr_Format(PyExc_TypeError,
3240 "%U() takes no arguments (%d given)",
3241 co->co_name,
3242 argcount + kwcount);
3243 goto fail;
3244 }
3245 /* Allocate and initialize storage for cell vars, and copy free
3246 vars into frame. This isn't too efficient right now. */
3247 if (PyTuple_GET_SIZE(co->co_cellvars)) {
3248 int i, j, nargs, found;
3249 Py_UNICODE *cellname, *argname;
3250 PyObject *c;
3251
3252 nargs = total_args;
3253 if (co->co_flags & CO_VARARGS)
3254 nargs++;
3255 if (co->co_flags & CO_VARKEYWORDS)
3256 nargs++;
3257
3258 /* Initialize each cell var, taking into account
3259 cell vars that are initialized from arguments.
3260
3261 Should arrange for the compiler to put cellvars
3262 that are arguments at the beginning of the cellvars
3263 list so that we can march over it more efficiently?
3264 */
3265 for (i = 0; i < PyTuple_GET_SIZE(co->co_cellvars); ++i) {
3266 cellname = PyUnicode_AS_UNICODE(
3267 PyTuple_GET_ITEM(co->co_cellvars, i));
3268 found = 0;
3269 for (j = 0; j < nargs; j++) {
3270 argname = PyUnicode_AS_UNICODE(
3271 PyTuple_GET_ITEM(co->co_varnames, j));
3272 if (Py_UNICODE_strcmp(cellname, argname) == 0) {
3273 c = PyCell_New(GETLOCAL(j));
3274 if (c == NULL)
3275 goto fail;
3276 GETLOCAL(co->co_nlocals + i) = c;
3277 found = 1;
3278 break;
3279 }
3280 }
3281 if (found == 0) {
3282 c = PyCell_New(NULL);
3283 if (c == NULL)
3284 goto fail;
3285 SETLOCAL(co->co_nlocals + i, c);
3286 }
3287 }
3288 }
3289 if (PyTuple_GET_SIZE(co->co_freevars)) {
3290 int i;
3291 for (i = 0; i < PyTuple_GET_SIZE(co->co_freevars); ++i) {
3292 PyObject *o = PyTuple_GET_ITEM(closure, i);
3293 Py_INCREF(o);
3294 freevars[PyTuple_GET_SIZE(co->co_cellvars) + i] = o;
3295 }
3296 }
3297
3298 if (co->co_flags & CO_GENERATOR) {
3299 /* Don't need to keep the reference to f_back, it will be set
3300 * when the generator is resumed. */
3301 Py_XDECREF(f->f_back);
3302 f->f_back = NULL;
3303
3304 PCALL(PCALL_GENERATOR);
3305
3306 /* Create a new generator that owns the ready to run frame
3307 * and return that as the value. */
3308 return PyGen_New(f);
3309 }
3310
3311 retval = PyEval_EvalFrameEx(f,0);
3312
3313fail: /* Jump here from prelude on failure */
3314
3315 /* decref'ing the frame can cause __del__ methods to get invoked,
3316 which can call back into Python. While we're done with the
3317 current Python frame (f), the associated C stack is still in use,
3318 so recursion_depth must be boosted for the duration.
3319 */
3320 assert(tstate != NULL);
3321 ++tstate->recursion_depth;
3322 Py_DECREF(f);
3323 --tstate->recursion_depth;
3324 return retval;
3325}
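
The keyword-matching loop in PyEval_EvalCodeEx() (lines 3146-3151) leans on string interning: identifier-like names are normally interned, so the raw pointer compare against co_varnames almost always hits before the PyObject_RichCompareBool fallback is needed. A hedged standalone sketch of why pointer identity is a sound fast path after interning:

    #include <Python.h>

    int
    main(void)
    {
        PyObject *a, *b;

        Py_Initialize();
        a = PyUnicode_FromString("keyword");
        b = PyUnicode_FromString("keyword");   /* distinct object, equal text */
        PyUnicode_InternInPlace(&a);
        PyUnicode_InternInPlace(&b);           /* now both name one object */
        printf("same pointer after interning: %s\n", a == b ? "yes" : "no");
        Py_DECREF(a);
        Py_DECREF(b);
        Py_Finalize();
        return 0;
    }
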
3326
3327
3328static PyObject *
3329special_lookup(PyObject *o, char *meth, PyObject **cache)
3330{
3331 PyObject *res;
3332 res = _PyObject_LookupSpecial(o, meth, cache);
3333 if (res == NULL && !PyErr_Occurred()) {
3334 PyErr_SetObject(PyExc_AttributeError, *cache);
3335 return NULL;
3336 }
3337 return res;
3338}
3339
3340
3341/* Logic for the raise statement (too complicated for inlining).
3342 This *consumes* a reference count to each of its arguments. */
3343static enum why_code
3344do_raise(PyObject *exc, PyObject *cause)
3345{
3346 PyObject *type = NULL, *value = NULL;
3347
3348 if (exc == NULL) {
3349 /* Reraise */
3350 PyThreadState *tstate = PyThreadState_GET();
3351 PyObject *tb;
3352 type = tstate->exc_type;
3353 value = tstate->exc_value;
3354 tb = tstate->exc_traceback;
3355 if (type == Py_None) {
3356 PyErr_SetString(PyExc_RuntimeError,
3357 "No active exception to reraise");
3358 return WHY_EXCEPTION;
3359 }
3360 Py_XINCREF(type);
3361 Py_XINCREF(value);
3362 Py_XINCREF(tb);
3363 PyErr_Restore(type, value, tb);
3364 return WHY_RERAISE;
3365 }
3366
3367 /* We support the following forms of raise:
3368 raise
3369 raise <instance>
3370 raise <type> */
3371
3372 if (PyExceptionClass_Check(exc)) {
3373 type = exc;
3374 value = PyObject_CallObject(exc, NULL);
3375 if (value == NULL)
3376 goto raise_error;
3377 }
3378 else if (PyExceptionInstance_Check(exc)) {
3379 value = exc;
3380 type = PyExceptionInstance_Class(exc);
3381 Py_INCREF(type);
3382 }
3383 else {
3384 /* Not something you can raise. You get an exception
3385 anyway, just not what you specified :-) */
3386 Py_DECREF(exc);
3387 PyErr_SetString(PyExc_TypeError,
3388 "exceptions must derive from BaseException");
3389 goto raise_error;
3390 }
3391
3392 if (cause) {
3393 PyObject *fixed_cause;
3394 if (PyExceptionClass_Check(cause)) {
3395 fixed_cause = PyObject_CallObject(cause, NULL);
3396 if (fixed_cause == NULL)
3397 goto raise_error;
3398 Py_DECREF(cause);
3399 }
3400 else if (PyExceptionInstance_Check(cause)) {
3401 fixed_cause = cause;
3402 }
3403 else {
3404 PyErr_SetString(PyExc_TypeError,
3405 "exception causes must derive from "
3406 "BaseException");
3407 goto raise_error;
3408 }
3409 PyException_SetCause(value, fixed_cause);
3410 }
3411
3412 PyErr_SetObject(type, value);
3413 /* PyErr_SetObject incref's its arguments */
3414 Py_XDECREF(value);
3415 Py_XDECREF(type);
3416 return WHY_EXCEPTION;
3417
3418raise_error:
3419 Py_XDECREF(value);
3420 Py_XDECREF(type);
3421 Py_XDECREF(cause);
3422 return WHY_EXCEPTION;
3423}
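
do_raise() above distinguishes its two operand forms exactly as the comment says: a class is instantiated to obtain the value, while an instance already carries its class. A hedged standalone sketch of that split using only public API (error checks elided for brevity):

    #include <Python.h>

    int
    main(void)
    {
        PyObject *exc, *value, *type;

        Py_Initialize();
        exc = PyExc_ValueError;                 /* a class: instantiate it */
        value = PyObject_CallObject(exc, NULL);
        type = (PyObject *)Py_TYPE(value);      /* an instance: recover class */
        printf("recovered class matches: %s\n", type == exc ? "yes" : "no");
        Py_DECREF(value);
        Py_Finalize();
        return 0;
    }
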
3424
3425/* Iterate v argcnt times and store the results on the stack (via decreasing
3426 sp). Return 1 for success, 0 if error.
3427
3428 If argcntafter == -1, do a simple unpack. If it is >= 0, do an unpack
3429 with a variable target.
3430*/
3431
3432static int
3433unpack_iterable(PyObject *v, int argcnt, int argcntafter, PyObject **sp)
3434{
3435 int i = 0, j = 0;
3436 Py_ssize_t ll = 0;
3437 PyObject *it; /* iter(v) */
3438 PyObject *w;
3439 PyObject *l = NULL; /* variable list */
3440
3441 assert(v != NULL);
3442
3443 it = PyObject_GetIter(v);
3444 if (it == NULL)
3445 goto Error;
3446
3447 for (; i < argcnt; i++) {
3448 w = PyIter_Next(it);
3449 if (w == NULL) {
3450 /* Iterator done, via error or exhaustion. */
3451 if (!PyErr_Occurred()) {
3452 PyErr_Format(PyExc_ValueError,
3453 "need more than %d value%s to unpack",
3454 i, i == 1 ? "" : "s");
3455 }
3456 goto Error;
3457 }
3458 *--sp = w;
3459 }
3460
3461 if (argcntafter == -1) {
3462 /* We better have exhausted the iterator now. */
3463 w = PyIter_Next(it);
3464 if (w == NULL) {
3465 if (PyErr_Occurred())
3466 goto Error;
3467 Py_DECREF(it);
3468 return 1;
3469 }
3470 Py_DECREF(w);
3471 PyErr_Format(PyExc_ValueError, "too many values to unpack "
3472 "(expected %d)", argcnt);
3473 goto Error;
3474 }
3475
3476 l = PySequence_List(it);
3477 if (l == NULL)
3478 goto Error;
3479 *--sp = l;
3480 i++;
3481
3482 ll = PyList_GET_SIZE(l);
3483 if (ll < argcntafter) {
3484 PyErr_Format(PyExc_ValueError, "need more than %zd values to unpack",
3485 argcnt + ll);
3486 goto Error;
3487 }
3488
3489 /* Pop the "after-variable" args off the list. */
3490 for (j = argcntafter; j > 0; j--, i++) {
3491 *--sp = PyList_GET_ITEM(l, ll - j);
3492 }
3493 /* Resize the list. */
3494 Py_SIZE(l) = ll - argcntafter;
3495 Py_DECREF(it);
3496 return 1;
3497
3498Error:
3499 for (; i > 0; i--, sp++)
3500 Py_DECREF(*sp);
3501 Py_XDECREF(it);
3502 return 0;
3503}
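
unpack_iterable() drives the plain iterator protocol: one PyIter_Next() per mandatory target, then PySequence_List() to sweep whatever remains into the starred target. A hedged standalone sketch of the same call sequence for the equivalent of 'first, *rest = [1, 2, 3]' (error checks elided for brevity):

    #include <Python.h>

    int
    main(void)
    {
        PyObject *v, *it, *first, *rest;

        Py_Initialize();
        v = Py_BuildValue("[iii]", 1, 2, 3);
        it = PyObject_GetIter(v);
        first = PyIter_Next(it);        /* the lone mandatory target */
        rest = PySequence_List(it);     /* the starred remainder: [2, 3] */
        printf("first=%ld rest_len=%zd\n",
               PyLong_AsLong(first), PyList_GET_SIZE(rest));
        Py_DECREF(first); Py_DECREF(rest); Py_DECREF(it); Py_DECREF(v);
        Py_Finalize();
        return 0;
    }
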
3504
3505
3506#ifdef LLTRACE
3507static int
3508prtrace(PyObject *v, char *str)
3509{
3510 printf("%s ", str);
3511 if (PyObject_Print(v, stdout, 0) != 0)
3512 PyErr_Clear(); /* Don't know what else to do */
3513 printf("\n");
3514 return 1;
3515}
3516#endif
3517
3518static void
3519call_exc_trace(Py_tracefunc func, PyObject *self, PyFrameObject *f)
3520{
3521 PyObject *type, *value, *traceback, *arg;
3522 int err;
3523 PyErr_Fetch(&type, &value, &traceback);
3524 if (value == NULL) {
3525 value = Py_None;
3526 Py_INCREF(value);
3527 }
3528 arg = PyTuple_Pack(3, type, value, traceback);
3529 if (arg == NULL) {
3530 PyErr_Restore(type, value, traceback);
3531 return;
3532 }
3533 err = call_trace(func, self, f, PyTrace_EXCEPTION, arg);
3534 Py_DECREF(arg);
3535 if (err == 0)
3536 PyErr_Restore(type, value, traceback);
3537 else {
3538 Py_XDECREF(type);
3539 Py_XDECREF(value);
3540 Py_XDECREF(traceback);
3541 }
3542}
3543
3544static int
3545call_trace_protected(Py_tracefunc func, PyObject *obj, PyFrameObject *frame,
3546 int what, PyObject *arg)
3547{
3548 PyObject *type, *value, *traceback;
3549 int err;
3550 PyErr_Fetch(&type, &value, &traceback);
3551 err = call_trace(func, obj, frame, what, arg);
3552 if (err == 0)
3553 {
3554 PyErr_Restore(type, value, traceback);
3555 return 0;
3556 }
3557 else {
3558 Py_XDECREF(type);
3559 Py_XDECREF(value);
3560 Py_XDECREF(traceback);
3561 return -1;
3562 }
3563}
3564
3565static int
3566call_trace(Py_tracefunc func, PyObject *obj, PyFrameObject *frame,
3567 int what, PyObject *arg)
3568{
3569 register PyThreadState *tstate = frame->f_tstate;
3570 int result;
3571 if (tstate->tracing)
3572 return 0;
3573 tstate->tracing++;
3574 tstate->use_tracing = 0;
3575 result = func(obj, frame, what, arg);
3576 tstate->use_tracing = ((tstate->c_tracefunc != NULL)
3577 || (tstate->c_profilefunc != NULL));
3578 tstate->tracing--;
3579 return result;
3580}
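
call_trace() raises tstate->tracing and clears use_tracing for the duration of the callback, so a trace function can do arbitrary work without retriggering itself. A hedged sketch of installing and removing a minimal Py_tracefunc through PyEval_SetTrace (the counting scheme is illustrative):

    #include <Python.h>

    /* Appends to the list passed as 'obj' once per line event; reentry is
       suppressed by call_trace() while this callback runs. */
    static int
    count_lines(PyObject *obj, PyFrameObject *frame, int what, PyObject *arg)
    {
        (void)frame; (void)arg;
        if (what == PyTrace_LINE)
            PyList_Append(obj, Py_None);
        return 0;
    }

    int
    main(void)
    {
        PyObject *hits;

        Py_Initialize();
        hits = PyList_New(0);
        PyEval_SetTrace(count_lines, hits);
        PyRun_SimpleString("x = 0\nfor i in range(3):\n    x += i\n");
        PyEval_SetTrace(NULL, NULL);
        printf("line events seen: %zd\n", PyList_GET_SIZE(hits));
        Py_DECREF(hits);
        Py_Finalize();
        return 0;
    }
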
3581
3582PyObject *
3583_PyEval_CallTracing(PyObject *func, PyObject *args)
3584{
3585 PyFrameObject *frame = PyEval_GetFrame();
3586 PyThreadState *tstate = frame->f_tstate;
3587 int save_tracing = tstate->tracing;
3588 int save_use_tracing = tstate->use_tracing;
3589 PyObject *result;
3590
3591 tstate->tracing = 0;
3592 tstate->use_tracing = ((tstate->c_tracefunc != NULL)
3593 || (tstate->c_profilefunc != NULL));
3594 result = PyObject_Call(func, args, NULL);
3595 tstate->tracing = save_tracing;
3596 tstate->use_tracing = save_use_tracing;
3597 return result;
3598}
3599
3600/* See Objects/lnotab_notes.txt for a description of how tracing works. */
3601static int
3602maybe_call_line_trace(Py_tracefunc func, PyObject *obj,
3603 PyFrameObject *frame, int *instr_lb, int *instr_ub,
3604 int *instr_prev)
3605{
3606 int result = 0;
3607 int line = frame->f_lineno;
3608
3609 /* If the last instruction executed isn't in the current
3610 instruction window, reset the window.
3611 */
3612 if (frame->f_lasti < *instr_lb || frame->f_lasti >= *instr_ub) {
3613 PyAddrPair bounds;
3614 line = _PyCode_CheckLineNumber(frame->f_code, frame->f_lasti,
3615 &bounds);
3616 *instr_lb = bounds.ap_lower;
3617 *instr_ub = bounds.ap_upper;
3618 }
3619 /* If the last instruction falls at the start of a line or if
3620 it represents a jump backwards, update the frame's line
3621 number and call the trace function. */
3622 if (frame->f_lasti == *instr_lb || frame->f_lasti < *instr_prev) {
3623 frame->f_lineno = line;
3624 result = call_trace(func, obj, frame, PyTrace_LINE, Py_None);
3625 }
3626 *instr_prev = frame->f_lasti;
3627 return result;
3628}
3629
3630void
3631PyEval_SetProfile(Py_tracefunc func, PyObject *arg)
3632{
3633 PyThreadState *tstate = PyThreadState_GET();
3634 PyObject *temp = tstate->c_profileobj;
3635 Py_XINCREF(arg);
3636 tstate->c_profilefunc = NULL;
3637 tstate->c_profileobj = NULL;
3638 /* Must make sure that tracing is not ignored if 'temp' is freed */
3639 tstate->use_tracing = tstate->c_tracefunc != NULL;
3640 Py_XDECREF(temp);
3641 tstate->c_profilefunc = func;
3642 tstate->c_profileobj = arg;
3643 /* Flag that tracing or profiling is turned on */
3644 tstate->use_tracing = (func != NULL) || (tstate->c_tracefunc != NULL);
3645}
3646
3647void
3648PyEval_SetTrace(Py_tracefunc func, PyObject *arg)
3649{
3650 PyThreadState *tstate = PyThreadState_GET();
3651 PyObject *temp = tstate->c_traceobj;
3652 _Py_TracingPossible += (func != NULL) - (tstate->c_tracefunc != NULL);
3653 Py_XINCREF(arg);
3654 tstate->c_tracefunc = NULL;
3655 tstate->c_traceobj = NULL;
3656 /* Must make sure that profiling is not ignored if 'temp' is freed */
3657 tstate->use_tracing = tstate->c_profilefunc != NULL;
3658 Py_XDECREF(temp);
3659 tstate->c_tracefunc = func;
3660 tstate->c_traceobj = arg;
3661 /* Flag that tracing or profiling is turned on */
3662 tstate->use_tracing = ((func != NULL)
3663 || (tstate->c_profilefunc != NULL));
3664}
3665
3666PyObject *
3667PyEval_GetBuiltins(void)
3668{
3669 PyFrameObject *current_frame = PyEval_GetFrame();
3670 if (current_frame == NULL)
3671 return PyThreadState_GET()->interp->builtins;
3672 else
3673 return current_frame->f_builtins;
3674}
3675
3676PyObject *
3677PyEval_GetLocals(void)
3678{
3679 PyFrameObject *current_frame = PyEval_GetFrame();
3680 if (current_frame == NULL)
3681 return NULL;
3682 PyFrame_FastToLocals(current_frame);
3683 return current_frame->f_locals;
3684}
3685
3686PyObject *
3687PyEval_GetGlobals(void)
3688{
3689 PyFrameObject *current_frame = PyEval_GetFrame();
3690 if (current_frame == NULL)
3691 return NULL;
3692 else
3693 return current_frame->f_globals;
3694}
3695
3696PyFrameObject *
3697PyEval_GetFrame(void)
3698{
3699 PyThreadState *tstate = PyThreadState_GET();
3700 return _PyThreadState_GetFrame(tstate);
3701}
3702
3703int
3704PyEval_MergeCompilerFlags(PyCompilerFlags *cf)
3705{
3706 PyFrameObject *current_frame = PyEval_GetFrame();
3707 int result = cf->cf_flags != 0;
3708
3709 if (current_frame != NULL) {
3710 const int codeflags = current_frame->f_code->co_flags;
3711 const int compilerflags = codeflags & PyCF_MASK;
3712 if (compilerflags) {
3713 result = 1;
3714 cf->cf_flags |= compilerflags;
3715 }
3716#if 0 /* future keyword */
3717 if (codeflags & CO_GENERATOR_ALLOWED) {
3718 result = 1;
3719 cf->cf_flags |= CO_GENERATOR_ALLOWED;
3720 }
3721#endif
3722 }
3723 return result;
3724}
3725
3726
3727/* External interface to call any callable object.
3728 The arg must be a tuple or NULL. The kw must be a dict or NULL. */
3729
3730PyObject *
3731PyEval_CallObjectWithKeywords(PyObject *func, PyObject *arg, PyObject *kw)
3732{
3733 PyObject *result;
3734
3735 if (arg == NULL) {
3736 arg = PyTuple_New(0);
3737 if (arg == NULL)
3738 return NULL;
3739 }
3740 else if (!PyTuple_Check(arg)) {
3741 PyErr_SetString(PyExc_TypeError,
3742 "argument list must be a tuple");
3743 return NULL;
3744 }
3745 else
3746 Py_INCREF(arg);
3747
3748 if (kw != NULL && !PyDict_Check(kw)) {
3749 PyErr_SetString(PyExc_TypeError,
3750 "keyword list must be a dictionary");
3751 Py_DECREF(arg);
3752 return NULL;
3753 }
3754
3755 result = PyObject_Call(func, arg, kw);
3756 Py_DECREF(arg);
3757 return result;
3758}
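
A hedged usage sketch for the external-call interface above; per the checks in the function body, arg must be a tuple or NULL and kw a dict or NULL (looking 'sorted' up in the builtins dict is purely illustrative):

    #include <Python.h>

    int
    main(void)
    {
        PyObject *func, *args, *kw, *res;

        Py_Initialize();
        func = PyDict_GetItemString(PyEval_GetBuiltins(), "sorted"); /* borrowed */
        args = Py_BuildValue("([iii])", 3, 1, 2);
        kw = Py_BuildValue("{s:O}", "reverse", Py_True);
        res = PyEval_CallObjectWithKeywords(func, args, kw);
        if (res != NULL) {
            PyObject_Print(res, stdout, 0);   /* prints [3, 2, 1] */
            printf("\n");
        }
        Py_XDECREF(res);
        Py_DECREF(args);
        Py_DECREF(kw);
        Py_Finalize();
        return 0;
    }
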
3759
3760const char *
3761PyEval_GetFuncName(PyObject *func)
3762{
3763 if (PyMethod_Check(func))
3764 return PyEval_GetFuncName(PyMethod_GET_FUNCTION(func));
3765 else if (PyFunction_Check(func))
3766 return _PyUnicode_AsString(((PyFunctionObject*)func)->func_name);
3767 else if (PyCFunction_Check(func))
3768 return ((PyCFunctionObject*)func)->m_ml->ml_name;
3769 else
3770 return func->ob_type->tp_name;
3771}
3772
3773const char *
3774PyEval_GetFuncDesc(PyObject *func)
3775{
3776 if (PyMethod_Check(func))
3777 return "()";
3778 else if (PyFunction_Check(func))
3779 return "()";
3780 else if (PyCFunction_Check(func))
3781 return "()";
3782 else
3783 return " object";
3784}
3785
3786static void
3787err_args(PyObject *func, int flags, int nargs)
3788{
3789 if (flags & METH_NOARGS)
3790 PyErr_Format(PyExc_TypeError,
3791 "%.200s() takes no arguments (%d given)",
3792 ((PyCFunctionObject *)func)->m_ml->ml_name,
3793 nargs);
3794 else
3795 PyErr_Format(PyExc_TypeError,
3796 "%.200s() takes exactly one argument (%d given)",
3797 ((PyCFunctionObject *)func)->m_ml->ml_name,
3798 nargs);
3799}
3800
3801#define C_TRACE(x, call) \
3802if (tstate->use_tracing && tstate->c_profilefunc) { \
3803 if (call_trace(tstate->c_profilefunc, \
3804 tstate->c_profileobj, \
3805 tstate->frame, PyTrace_C_CALL, \
3806 func)) { \
3807 x = NULL; \
3808 } \
3809 else { \
3810 x = call; \
3811 if (tstate->c_profilefunc != NULL) { \
3812 if (x == NULL) { \
3813 call_trace_protected(tstate->c_profilefunc, \
3814 tstate->c_profileobj, \
3815 tstate->frame, PyTrace_C_EXCEPTION, \
3816 func); \
3817 /* XXX should pass (type, value, tb) */ \
3818 } else { \
3819 if (call_trace(tstate->c_profilefunc, \
3820 tstate->c_profileobj, \
3821 tstate->frame, PyTrace_C_RETURN, \
3822 func)) { \
3823 Py_DECREF(x); \
3824 x = NULL; \
3825 } \
3826 } \
3827 } \
3828 } \
3829} else { \
3830 x = call; \
3831 }
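
C_TRACE brackets every profiled PyCFunction call with a PyTrace_C_CALL event and, on the way out, PyTrace_C_RETURN or PyTrace_C_EXCEPTION, passing the function object as the callback's last argument. A hedged sketch of a profile callback that observes those events:

    #include <Python.h>

    static int
    profile_c_calls(PyObject *obj, PyFrameObject *frame, int what, PyObject *arg)
    {
        (void)obj; (void)frame;
        if (what == PyTrace_C_CALL)
            printf("C call: %s\n", PyEval_GetFuncName(arg));
        else if (what == PyTrace_C_RETURN || what == PyTrace_C_EXCEPTION)
            printf("C call finished\n");
        return 0;
    }

    int
    main(void)
    {
        Py_Initialize();
        PyEval_SetProfile(profile_c_calls, NULL);
        PyRun_SimpleString("len([1, 2, 3])");
        PyEval_SetProfile(NULL, NULL);
        Py_Finalize();
        return 0;
    }
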
3832
3833static PyObject *
3834call_function(PyObject ***pp_stack, int oparg
3835#ifdef WITH_TSC
3836 , uint64* pintr0, uint64* pintr1
3837#endif
3838 )
3839{
3840 int na = oparg & 0xff;
3841 int nk = (oparg>>8) & 0xff;
3842 int n = na + 2 * nk;
3843 PyObject **pfunc = (*pp_stack) - n - 1;
3844 PyObject *func = *pfunc;
3845 PyObject *x, *w;
3846
3847 /* Always dispatch PyCFunction first, because these are
3848 presumed to be the most frequent callable object.
3849 */
3850 if (PyCFunction_Check(func) && nk == 0) {
3851 int flags = PyCFunction_GET_FLAGS(func);
3852 PyThreadState *tstate = PyThreadState_GET();
3853
3854 PCALL(PCALL_CFUNCTION);
3855 if (flags & (METH_NOARGS | METH_O)) {
3856 PyCFunction meth = PyCFunction_GET_FUNCTION(func);
3857 PyObject *self = PyCFunction_GET_SELF(func);
3858 if (flags & METH_NOARGS && na == 0) {
3859 C_TRACE(x, (*meth)(self,NULL));
3860 }
3861 else if (flags & METH_O && na == 1) {
3862 PyObject *arg = EXT_POP(*pp_stack);
3863 C_TRACE(x, (*meth)(self,arg));
3864 Py_DECREF(arg);
3865 }
3866 else {
3867 err_args(func, flags, na);
3868 x = NULL;
3869 }
3870 }
3871 else {
3872 PyObject *callargs;
3873 callargs = load_args(pp_stack, na);
3874 READ_TIMESTAMP(*pintr0);
3875 C_TRACE(x, PyCFunction_Call(func,callargs,NULL));
3876 READ_TIMESTAMP(*pintr1);
3877 Py_XDECREF(callargs);
3878 }
3879 } else {
3880 if (PyMethod_Check(func) && PyMethod_GET_SELF(func) != NULL) {
3881 /* optimize access to bound methods */
3882 PyObject *self = PyMethod_GET_SELF(func);
3883 PCALL(PCALL_METHOD);
3884 PCALL(PCALL_BOUND_METHOD);
3885 Py_INCREF(self);
3886 func = PyMethod_GET_FUNCTION(func);
3887 Py_INCREF(func);
3888 Py_DECREF(*pfunc);
3889 *pfunc = self;
3890 na++;
3891 n++;
3892 } else
3893 Py_INCREF(func);
3894 READ_TIMESTAMP(*pintr0);
3895 if (PyFunction_Check(func))
3896 x = fast_function(func, pp_stack, n, na, nk);
3897 else
3898 x = do_call(func, pp_stack, na, nk);
3899 READ_TIMESTAMP(*pintr1);
3900 Py_DECREF(func);
3901 }
3902
3903 /* Clear the stack of the function object. Also removes
3904 the arguments in case they weren't consumed already
3905 (fast_function() and err_args() leave them on the stack).
3906 */
3907 while ((*pp_stack) > pfunc) {
3908 w = EXT_POP(*pp_stack);
3909 Py_DECREF(w);
3910 PCALL(PCALL_POP);
3911 }
3912 return x;
3913}
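
call_function() decodes CALL_FUNCTION's oparg as packed byte fields: the low byte holds the positional count and the next byte the number of keyword (name, value) pairs, so the callable sits n + 1 slots below the stack pointer. A small worked example of the decoding:

    #include <stdio.h>

    int
    main(void)
    {
        int oparg = 0x0102;             /* e.g. f(a, b, c=1) */
        int na = oparg & 0xff;          /* positional args: 2 */
        int nk = (oparg >> 8) & 0xff;   /* keyword pairs: 1 */
        int n = na + 2 * nk;            /* stack slots above the callable: 4 */
        printf("na=%d nk=%d n=%d\n", na, nk, n);
        return 0;
    }
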
3914
3915/* The fast_function() function optimizes calls for which no argument
3916 tuple is necessary; the objects are passed directly from the stack.
3917 For the simplest case -- a function that takes only positional
3918 arguments and is called with only positional arguments -- it
3919 inlines the most primitive frame setup code from
3920 PyEval_EvalCodeEx(), which vastly reduces the checks that must be
3921 done before evaluating the frame.
3922*/
3923
3924static PyObject *
3925fast_function(PyObject *func, PyObject ***pp_stack, int n, int na, int nk)
3926{
3927 PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func);
3928 PyObject *globals = PyFunction_GET_GLOBALS(func);
3929 PyObject *argdefs = PyFunction_GET_DEFAULTS(func);
3930 PyObject *kwdefs = PyFunction_GET_KW_DEFAULTS(func);
3931 PyObject **d = NULL;
3932 int nd = 0;
3933
3934 PCALL(PCALL_FUNCTION);
3935 PCALL(PCALL_FAST_FUNCTION);
3936 if (argdefs == NULL && co->co_argcount == n &&
3937 co->co_kwonlyargcount == 0 && nk==0 &&
3938 co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) {
3939 PyFrameObject *f;
3940 PyObject *retval = NULL;
3941 PyThreadState *tstate = PyThreadState_GET();
3942 PyObject **fastlocals, **stack;
3943 int i;
3944
3945 PCALL(PCALL_FASTER_FUNCTION);
3946 assert(globals != NULL);
3947 /* XXX Perhaps we should create a specialized
3948 PyFrame_New() that doesn't take locals, but does
3949 take builtins without sanity checking them.
3950 */
3951 assert(tstate != NULL);
3952 f = PyFrame_New(tstate, co, globals, NULL);
3953 if (f == NULL)
3954 return NULL;
3955
3956 fastlocals = f->f_localsplus;
3957 stack = (*pp_stack) - n;
3958
3959 for (i = 0; i < n; i++) {
3960 Py_INCREF(*stack)( _Py_RefTotal++ , ((PyObject*)(*stack))->ob_refcnt++);
3961 fastlocals[i] = *stack++;
3962 }
3963 retval = PyEval_EvalFrameEx(f,0);
3964 ++tstate->recursion_depth;
3965 Py_DECREF(f)do { if (_Py_RefTotal-- , --((PyObject*)(f))->ob_refcnt !=
0) { if (((PyObject*)f)->ob_refcnt < 0) _Py_NegativeRefcount
("Python/ceval.c", 3965, (PyObject *)(f)); } else _Py_Dealloc
((PyObject *)(f)); } while (0)
;
3966 --tstate->recursion_depth;
3967 return retval;
3968 }
3969 if (argdefs != NULL((void*)0)) {
3970 d = &PyTuple_GET_ITEM(argdefs, 0)(((PyTupleObject *)(argdefs))->ob_item[0]);
3971 nd = Py_SIZE(argdefs)(((PyVarObject*)(argdefs))->ob_size);
3972 }
3973 return PyEval_EvalCodeEx((PyObject*)co, globals,
3974 (PyObject *)NULL((void*)0), (*pp_stack)-n, na,
3975 (*pp_stack)-2*nk, nk, d, nd, kwdefs,
3976 PyFunction_GET_CLOSURE(func)(((PyFunctionObject *)func) -> func_closure));
3977}
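
A minimal sketch (editor's illustration, not part of ceval.c) of the eligibility test the fast path above applies; the helper name would_take_fast_path is hypothetical, and it assumes only the function-object and code-object internals already used in this file. In practice a plain `def f(a, b)` called as `f(x, y)` qualifies, while defaults, keyword-only arguments, closures, or */** parameters force the PyEval_EvalCodeEx() fallback.

    /* Hypothetical helper: would a call with n positional arguments
       and no keywords take the inlined fast path above? */
    static int
    would_take_fast_path(PyObject *func, int n)
    {
        PyCodeObject *co;
        if (!PyFunction_Check(func))
            return 0;
        co = (PyCodeObject *)PyFunction_GET_CODE(func);
        return PyFunction_GET_DEFAULTS(func) == NULL &&
               co->co_argcount == n &&
               co->co_kwonlyargcount == 0 &&
               co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE);
    }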
3978
3979static PyObject *
3980update_keyword_args(PyObject *orig_kwdict, int nk, PyObject ***pp_stack,
3981                    PyObject *func)
3982{
3983    PyObject *kwdict = NULL;
3984    if (orig_kwdict == NULL)
3985        kwdict = PyDict_New();
3986    else {
3987        kwdict = PyDict_Copy(orig_kwdict);
3988        Py_DECREF(orig_kwdict);
3989    }
3990    if (kwdict == NULL)
3991        return NULL;
3992    while (--nk >= 0) {
3993        int err;
3994        PyObject *value = EXT_POP(*pp_stack);
3995        PyObject *key = EXT_POP(*pp_stack);
3996        if (PyDict_GetItem(kwdict, key) != NULL) {
3997            PyErr_Format(PyExc_TypeError,
3998                         "%.200s%s got multiple values "
3999                         "for keyword argument '%U'",
4000                         PyEval_GetFuncName(func),
4001                         PyEval_GetFuncDesc(func),
4002                         key);
4003            Py_DECREF(key);
4004            Py_DECREF(value);
4005            Py_DECREF(kwdict);
4006            return NULL;
4007        }
4008        err = PyDict_SetItem(kwdict, key, value);
4009        Py_DECREF(key);
4010        Py_DECREF(value);
4011        if (err) {
4012            Py_DECREF(kwdict);
4013            return NULL;
4014        }
4015    }
4016    return kwdict;
4017}
4018
4019static PyObject *
4020update_star_args(int nstack, int nstar, PyObject *stararg,
4021                 PyObject ***pp_stack)
4022{
4023    PyObject *callargs, *w;
4024
4025    callargs = PyTuple_New(nstack + nstar);
4026    if (callargs == NULL) {
4027        return NULL;
4028    }
4029    if (nstar) {
4030        int i;
4031        for (i = 0; i < nstar; i++) {
4032            PyObject *a = PyTuple_GET_ITEM(stararg, i);
4033            Py_INCREF(a);
4034            PyTuple_SET_ITEM(callargs, nstack + i, a);
4035        }
4036    }
4037    while (--nstack >= 0) {
4038        w = EXT_POP(*pp_stack);
4039        PyTuple_SET_ITEM(callargs, nstack, w);
4040    }
4041    return callargs;
4042}
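
The resulting tuple layout: the explicit positional arguments fill slots 0..nstack-1 (popped in reverse, so source order is preserved) and the unpacked *args contents fill slots nstack..nstack+nstar-1. An editor's sketch of the equivalent result built with only public API calls, using hypothetical values:

    /* Illustrative only: for f(1, 2, *rest) with rest == (3, 4), the
       tuple built above equals the concatenation of the two parts. */
    static PyObject *
    callargs_equivalent(void)
    {
        PyObject *explicit_args = Py_BuildValue("(ii)", 1, 2);
        PyObject *rest = Py_BuildValue("(ii)", 3, 4);
        PyObject *callargs = NULL;
        if (explicit_args != NULL && rest != NULL)
            callargs = PySequence_Concat(explicit_args, rest); /* (1, 2, 3, 4) */
        Py_XDECREF(explicit_args);
        Py_XDECREF(rest);
        return callargs;
    }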
4043
4044static PyObject *
4045load_args(PyObject ***pp_stack, int na)
4046{
4047    PyObject *args = PyTuple_New(na);
4048    PyObject *w;
4049
4050    if (args == NULL)
4051        return NULL;
4052    while (--na >= 0) {
4053        w = EXT_POP(*pp_stack);
4054        PyTuple_SET_ITEM(args, na, w);
4055    }
4056    return args;
4057}
4058
4059static PyObject *
4060do_call(PyObject *func, PyObject ***pp_stack, int na, int nk)
4061{
4062    PyObject *callargs = NULL;
4063    PyObject *kwdict = NULL;
4064    PyObject *result = NULL;
4065
4066    if (nk > 0) {
4067        kwdict = update_keyword_args(NULL, nk, pp_stack, func);
4068        if (kwdict == NULL)
4069            goto call_fail;
4070    }
4071    callargs = load_args(pp_stack, na);
4072    if (callargs == NULL)
4073        goto call_fail;
4074#ifdef CALL_PROFILE
4075    /* At this point, we have to look at the type of func to
4076       update the call stats properly.  Do it here so as to avoid
4077       exposing the call stats machinery outside ceval.c
4078    */
4079    if (PyFunction_Check(func))
4080        PCALL(PCALL_FUNCTION);
4081    else if (PyMethod_Check(func))
4082        PCALL(PCALL_METHOD);
4083    else if (PyType_Check(func))
4084        PCALL(PCALL_TYPE);
4085    else if (PyCFunction_Check(func))
4086        PCALL(PCALL_CFUNCTION);
4087    else
4088        PCALL(PCALL_OTHER);
4089#endif
4090    if (PyCFunction_Check(func)) {
4091        PyThreadState *tstate = PyThreadState_GET();
4092        C_TRACE(result, PyCFunction_Call(func, callargs, kwdict));
4093    }
4094    else
4095        result = PyObject_Call(func, callargs, kwdict);
4096call_fail:
4097    Py_XDECREF(callargs);
4098    Py_XDECREF(kwdict);
4099    return result;
4100}
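
For context, na and nk arrive here already decoded from the CALL_FUNCTION oparg in the dispatch loop earlier in this file. A sketch of that decoding, assuming the layout used by this interpreter version: the low byte counts positional arguments and the next byte counts keyword pairs, each pair occupying two stack slots (key below value).

    int oparg = 0x0203;            /* hypothetical: 3 positional, 2 keyword */
    int na = oparg & 0xff;         /* na == 3 */
    int nk = (oparg >> 8) & 0xff;  /* nk == 2 */
    int n  = na + 2*nk;            /* 7 stack slots above the function object */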
4101
4102static PyObject *
4103ext_do_call(PyObject *func, PyObject ***pp_stack, int flags, int na, int nk)
4104{
4105    int nstar = 0;
4106    PyObject *callargs = NULL;
4107    PyObject *stararg = NULL;
4108    PyObject *kwdict = NULL;
4109    PyObject *result = NULL;
4110
4111    if (flags & CALL_FLAG_KW) {
4112        kwdict = EXT_POP(*pp_stack);
4113        if (!PyDict_Check(kwdict)) {
4114            PyObject *d;
4115            d = PyDict_New();
4116            if (d == NULL)
4117                goto ext_call_fail;
4118            if (PyDict_Update(d, kwdict) != 0) {
4119                Py_DECREF(d);
4120                /* PyDict_Update raises attribute
4121                 * error (percolated from an attempt
4122                 * to get 'keys' attribute) instead of
4123                 * a type error if its second argument
4124                 * is not a mapping.
4125                 */
4126                if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
4127                    PyErr_Format(PyExc_TypeError,
4128                                 "%.200s%.200s argument after ** "
4129                                 "must be a mapping, not %.200s",
4130                                 PyEval_GetFuncName(func),
4131                                 PyEval_GetFuncDesc(func),
4132                                 kwdict->ob_type->tp_name);
4133                }
4134                goto ext_call_fail;
4135            }
4136            Py_DECREF(kwdict);
4137            kwdict = d;
4138        }
4139    }
4140    if (flags & CALL_FLAG_VAR) {
4141        stararg = EXT_POP(*pp_stack);
4142        if (!PyTuple_Check(stararg)) {
4143            PyObject *t = NULL;
4144            t = PySequence_Tuple(stararg);
4145            if (t == NULL) {
4146                if (PyErr_ExceptionMatches(PyExc_TypeError)) {
4147                    PyErr_Format(PyExc_TypeError,
4148                                 "%.200s%.200s argument after * "
4149                                 "must be a sequence, not %.200s",
4150                                 PyEval_GetFuncName(func),
4151                                 PyEval_GetFuncDesc(func),
4152                                 stararg->ob_type->tp_name);
4153                }
4154                goto ext_call_fail;
4155            }
4156            Py_DECREF(stararg);
4157            stararg = t;
4158        }
4159        nstar = PyTuple_GET_SIZE(stararg);
4160    }
4161    if (nk > 0) {
4162        kwdict = update_keyword_args(kwdict, nk, pp_stack, func);
4163        if (kwdict == NULL)
4164            goto ext_call_fail;
4165    }
4166    callargs = update_star_args(na, nstar, stararg, pp_stack);
4167    if (callargs == NULL)
4168        goto ext_call_fail;
4169#ifdef CALL_PROFILE
4170    /* At this point, we have to look at the type of func to
4171       update the call stats properly.  Do it here so as to avoid
4172       exposing the call stats machinery outside ceval.c
4173    */
4174    if (PyFunction_Check(func))
4175        PCALL(PCALL_FUNCTION);
4176    else if (PyMethod_Check(func))
4177        PCALL(PCALL_METHOD);
4178    else if (PyType_Check(func))
4179        PCALL(PCALL_TYPE);
4180    else if (PyCFunction_Check(func))
4181        PCALL(PCALL_CFUNCTION);
4182    else
4183        PCALL(PCALL_OTHER);
4184#endif
4185    if (PyCFunction_Check(func)) {
4186        PyThreadState *tstate = PyThreadState_GET();
4187        C_TRACE(result, PyCFunction_Call(func, callargs, kwdict));
4188    }
4189    else
4190        result = PyObject_Call(func, callargs, kwdict);
4191ext_call_fail:
4192    Py_XDECREF(callargs);
4193    Py_XDECREF(kwdict);
4194    Py_XDECREF(stararg);
4195    return result;
4196}
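
The flags argument tells ext_do_call() which extra items are on the stack: CALL_FLAG_VAR for a *args sequence, CALL_FLAG_KW for a **kwargs mapping, or both. An editor's sketch of the combinations, using the flag macros defined earlier in this file:

    int flags;
    flags = CALL_FLAG_VAR;                 /* f(*args)        */
    flags = CALL_FLAG_KW;                  /* f(**kw)         */
    flags = CALL_FLAG_VAR | CALL_FLAG_KW;  /* f(*args, **kw)  */
    /* When both bits are set, the **kw mapping sits on top of the
       stack and is popped first, then the *args sequence below it. */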
4197
4198/* Extract a slice index from a PyInt or PyLong or an object with the
4199 nb_index slot defined, and store in *pi.
4200 Silently reduce values larger than PY_SSIZE_T_MAX to PY_SSIZE_T_MAX,
4201 and silently boost values less than -PY_SSIZE_T_MAX-1 to -PY_SSIZE_T_MAX-1.
4202 Return 0 on error, 1 on success.
4203*/
4204/* Note: If v is NULL, return success without storing into *pi. This
4205   is because _PyEval_SliceIndex() is called by apply_slice(), which can be
4206 called by the SLICE opcode with v and/or w equal to NULL.
4207*/
4208int
4209_PyEval_SliceIndex(PyObject *v, Py_ssize_t *pi)
4210{
4211    if (v != NULL) {
4212        Py_ssize_t x;
4213        if (PyIndex_Check(v)) {
4214            x = PyNumber_AsSsize_t(v, NULL);
4215            if (x == -1 && PyErr_Occurred())
4216                return 0;
4217        }
4218        else {
4219            PyErr_SetString(PyExc_TypeError,
4220                            "slice indices must be integers or "
4221                            "None or have an __index__ method");
4222            return 0;
4223        }
4224        *pi = x;
4225    }
4226    return 1;
4227}
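
The silent clamping described in the comment above comes from passing NULL as the exception argument to PyNumber_AsSsize_t(), which clips out-of-range values to the Py_ssize_t limits instead of raising OverflowError. An editor's sketch with a hypothetical oversized index:

    PyObject *huge = PyLong_FromString("123456789012345678901234567890",
                                       NULL, 10);
    Py_ssize_t i = 0;
    if (huge != NULL && _PyEval_SliceIndex(huge, &i)) {
        /* i == PY_SSIZE_T_MAX: the value was clipped, no error is set */
    }
    Py_XDECREF(huge);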
4228
4229#define CANNOT_CATCH_MSG "catching classes that do not inherit from "\
4230                         "BaseException is not allowed"
4231
4232static PyObject *
4233cmp_outcome(int op, register PyObject *v, register PyObject *w)
4234{
4235    int res = 0;
4236    switch (op) {
4237    case PyCmp_IS:
4238        res = (v == w);
4239        break;
4240    case PyCmp_IS_NOT:
4241        res = (v != w);
4242        break;
4243    case PyCmp_IN:
4244        res = PySequence_Contains(w, v);
4245        if (res < 0)
4246            return NULL;
4247        break;
4248    case PyCmp_NOT_IN:
4249        res = PySequence_Contains(w, v);
4250        if (res < 0)
4251            return NULL;
4252        res = !res;
4253        break;
4254    case PyCmp_EXC_MATCH:
4255        if (PyTuple_Check(w)) {
4256            Py_ssize_t i, length;
4257            length = PyTuple_Size(w);
4258            for (i = 0; i < length; i += 1) {
4259                PyObject *exc = PyTuple_GET_ITEM(w, i);
4260                if (!PyExceptionClass_Check(exc)) {
4261                    PyErr_SetString(PyExc_TypeError,
4262                                    CANNOT_CATCH_MSG);
4263                    return NULL;
4264                }
4265            }
4266        }
4267        else {
4268            if (!PyExceptionClass_Check(w)) {
4269                PyErr_SetString(PyExc_TypeError,
4270                                CANNOT_CATCH_MSG);
4271                return NULL;
4272            }
4273        }
4274        res = PyErr_GivenExceptionMatches(v, w);
4275        break;
4276    default:
4277        return PyObject_RichCompare(v, w, op);
4278    }
4279    v = res ? Py_True : Py_False;
4280    Py_INCREF(v);
4281    return v;
4282}
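
Note the operand order for the containment cases: for the expression `x in y`, v is x and w is the container y, hence the swapped arguments in PySequence_Contains(w, v). An editor's sketch with hypothetical values:

    PyObject *container = Py_BuildValue("[iii]", 1, 2, 3);  /* y = [1, 2, 3] */
    PyObject *item = PyLong_FromLong(2);                    /* x = 2         */
    int found = -1;
    if (container != NULL && item != NULL)
        found = PySequence_Contains(container, item);       /* found == 1 */
    Py_XDECREF(container);
    Py_XDECREF(item);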
4283
4284static PyObject *
4285import_from(PyObject *v, PyObject *name)
4286{
4287    PyObject *x;
4288
4289    x = PyObject_GetAttr(v, name);
4290    if (x == NULL && PyErr_ExceptionMatches(PyExc_AttributeError)) {
4291        PyErr_Format(PyExc_ImportError, "cannot import name %S", name);
4292    }
4293    return x;
4294}
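
import_from() is the C-level body of `from module import name`: a plain attribute fetch whose AttributeError is reported as ImportError. An editor's sketch of the same effect with only public API calls, using "os" and "path" as hypothetical stand-ins:

    PyObject *mod = PyImport_ImportModule("os");
    PyObject *attr = NULL;
    if (mod != NULL) {
        attr = PyObject_GetAttrString(mod, "path");
        if (attr == NULL && PyErr_ExceptionMatches(PyExc_AttributeError))
            PyErr_Format(PyExc_ImportError, "cannot import name %s", "path");
        Py_DECREF(mod);
    }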
4295
4296static int
4297import_all_from(PyObject *locals, PyObject *v)
4298{
4299    PyObject *all = PyObject_GetAttrString(v, "__all__");
4300    PyObject *dict, *name, *value;
4301    int skip_leading_underscores = 0;
4302    int pos, err;
4303
4304    if (all == NULL) {
4305        if (!PyErr_ExceptionMatches(PyExc_AttributeError))
4306            return -1; /* Unexpected error */
4307        PyErr_Clear();
4308        dict = PyObject_GetAttrString(v, "__dict__");
4309        if (dict == NULL) {
4310            if (!PyErr_ExceptionMatches(PyExc_AttributeError))
4311                return -1;
4312            PyErr_SetString(PyExc_ImportError,
4313                "from-import-* object has no __dict__ and no __all__");
4314            return -1;
4315        }
4316        all = PyMapping_Keys(dict);
4317        Py_DECREF(dict);
4318        if (all == NULL)
4319            return -1;
4320        skip_leading_underscores = 1;
4321    }
4322
4323    for (pos = 0, err = 0; ; pos++) {
4324        name = PySequence_GetItem(all, pos);
4325        if (name == NULL) {
4326            if (!PyErr_ExceptionMatches(PyExc_IndexError))
4327                err = -1;
4328            else
4329                PyErr_Clear();
4330            break;
4331        }
4332        if (skip_leading_underscores &&
4333            PyUnicode_Check(name) &&
4334            PyUnicode_AS_UNICODE(name)[0] == '_')
4335        {
4336            Py_DECREF(name);
4337            continue;
4338        }
4339        value = PyObject_GetAttr(v, name);
4340        if (value == NULL)
4341            err = -1;
4342        else if (PyDict_CheckExact(locals))
4343            err = PyDict_SetItem(locals, name, value);
4344        else
4345            err = PyObject_SetItem(locals, name, value);
4346        Py_DECREF(name);
4347        Py_XDECREF(value);
4348        if (err != 0)
4349            break;
4350    }
4351    Py_DECREF(all);
4352    return err;
4353}
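
The underscore filter above applies only when the module lacks __all__; with __all__ present every listed name is copied, including ones that start with '_'. The test itself, in isolation (editor's sketch, using the narrow-build Py_UNICODE access used throughout this file and a hypothetical name):

    PyObject *name = PyUnicode_FromString("_private");
    int hidden = (name != NULL &&
                  PyUnicode_AS_UNICODE(name)[0] == '_');    /* hidden == 1 */
    Py_XDECREF(name);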
4354
4355static void
4356format_exc_check_arg(PyObject *exc, const char *format_str, PyObject *obj)
4357{
4358    const char *obj_str;
4359
4360    if (!obj)
4361        return;
4362
4363    obj_str = _PyUnicode_AsString(obj);
4364    if (!obj_str)
4365        return;
4366
4367    PyErr_Format(exc, format_str, obj_str);
4368}
4369
4370static void
4371format_exc_unbound(PyCodeObject *co, int oparg)
4372{
4373    PyObject *name;
4374    /* Don't stomp existing exception */
4375    if (PyErr_Occurred())
4376        return;
4377    if (oparg < PyTuple_GET_SIZE(co->co_cellvars)) {
4378        name = PyTuple_GET_ITEM(co->co_cellvars,
4379                                oparg);
4380        format_exc_check_arg(
4381            PyExc_UnboundLocalError,
4382            UNBOUNDLOCAL_ERROR_MSG,
4383            name);
4384    } else {
4385        name = PyTuple_GET_ITEM(co->co_freevars, oparg -
4386                                PyTuple_GET_SIZE(co->co_cellvars));
4387        format_exc_check_arg(PyExc_NameError,
4388                             UNBOUNDFREE_ERROR_MSG " in enclosing scope", name);
4389    }
4390}
4391
4392static PyObject *
4393unicode_concatenate(PyObject *v, PyObject *w,
4394                    PyFrameObject *f, unsigned char *next_instr)
4395{
4396    /* This function implements 'variable += expr' when both arguments
4397       are (Unicode) strings. */
4398    Py_ssize_t v_len = PyUnicode_GET_SIZE(v);
4399    Py_ssize_t w_len = PyUnicode_GET_SIZE(w);
4400    Py_ssize_t new_len = v_len + w_len;
4401    if (new_len < 0) {
4402        PyErr_SetString(PyExc_OverflowError,
4403                        "strings are too large to concat");
4404        return NULL;
4405    }
4406
4407    if (Py_REFCNT(v) == 2) {
4408        /* In the common case, there are 2 references to the value
4409         * stored in 'variable' when the += is performed: one on the
4410         * value stack (in 'v') and one still stored in the
4411         * 'variable'.  We try to delete the variable now to reduce
4412         * the refcnt to 1.
4413         */
4414        switch (*next_instr) {
4415        case STORE_FAST:
4416        {
4417            int oparg = PEEKARG();
4418            PyObject **fastlocals = f->f_localsplus;
4419            if (GETLOCAL(oparg) == v)
4420                SETLOCAL(oparg, NULL);
4421            break;
4422        }
4423        case STORE_DEREF:
4424        {
4425            PyObject **freevars = (f->f_localsplus +
4426                                   f->f_code->co_nlocals);
4427            PyObject *c = freevars[PEEKARG()];
4428            if (PyCell_GET(c) == v)
4429                PyCell_Set(c, NULL);
4430            break;
4431        }
4432        case STORE_NAME:
4433        {
4434            PyObject *names = f->f_code->co_names;
4435            PyObject *name = GETITEM(names, PEEKARG());
4436            PyObject *locals = f->f_locals;
4437            if (PyDict_CheckExact(locals) &&
4438                PyDict_GetItem(locals, name) == v) {
4439                if (PyDict_DelItem(locals, name) != 0) {
4440                    PyErr_Clear();
4441                }
4442            }
4443            break;
4444        }
4445        }
4446    }
4447
4448    if (Py_REFCNT(v) == 1 && !PyUnicode_CHECK_INTERNED(v)) {
4449        /* Now we own the last reference to 'v', so we can resize it
4450         * in-place.
4451         */
4452        if (PyUnicode_Resize(&v, new_len) != 0) {
4453            /* XXX if PyUnicode_Resize() fails, 'v' has been
4454             * deallocated so it cannot be put back into
4455             * 'variable'.  The MemoryError is raised when there
4456             * is no value in 'variable', which might (very
4457             * remotely) be a cause of incompatibilities.
4458             */
4459            return NULL;
4460        }
4461        /* copy 'w' into the newly allocated area of 'v' */
4462        memcpy(PyUnicode_AS_UNICODE(v) + v_len,
4463               PyUnicode_AS_UNICODE(w), w_len*sizeof(Py_UNICODE));
4464        return v;
4465    }
4466    else {
4467        /* When in-place resizing is not an option. */
4468        w = PyUnicode_Concat(v, w);
4469        Py_DECREF(v);
4470        return w;
4471    }
4472}
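
The else branch is simply the public-API equivalent of `v + w`; what makes unicode_concatenate() worthwhile is the earlier branch, where holding the sole, un-interned reference lets the left operand be grown in place instead of copied. An editor's sketch of the fallback in isolation:

    PyObject *v = PyUnicode_FromString("spam");
    PyObject *w = PyUnicode_FromString("eggs");
    PyObject *result = NULL;
    if (v != NULL && w != NULL)
        result = PyUnicode_Concat(v, w);   /* new string "spameggs" */
    Py_XDECREF(v);
    Py_XDECREF(w);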
4473
4474#ifdef DYNAMIC_EXECUTION_PROFILE
4475
4476static PyObject *
4477getarray(long a[256])
4478{
4479    int i;
4480    PyObject *l = PyList_New(256);
4481    if (l == NULL) return NULL;
4482    for (i = 0; i < 256; i++) {
4483        PyObject *x = PyLong_FromLong(a[i]);
4484        if (x == NULL) {
4485            Py_DECREF(l);
4486            return NULL;
4487        }
4488        PyList_SetItem(l, i, x);
4489    }
4490    for (i = 0; i < 256; i++)
4491        a[i] = 0;
4492    return l;
4493}
4494
4495PyObject *
4496_Py_GetDXProfile(PyObject *self, PyObject *args)
4497{
4498#ifndef DXPAIRS
4499    return getarray(dxp);
4500#else
4501    int i;
4502    PyObject *l = PyList_New(257);
4503    if (l == NULL) return NULL;
4504    for (i = 0; i < 257; i++) {
4505        PyObject *x = getarray(dxpairs[i]);
4506        if (x == NULL) {
4507            Py_DECREF(l);
4508            return NULL;
4509        }
4510        PyList_SetItem(l, i, x);
4511    }
4512    return l;
4513#endif
4514}
4515
4516#endif