#define COMMON_FIELDS(PREFIX) PyObject *PREFIX ## globals; PyObject *PREFIX ## builtins; PyObject *PREFIX ## name; PyObject *PREFIX ## qualname; PyObject *PREFIX ## code;
#define GENERATE_ASDL_SEQ_CONSTRUCTOR(NAME, TYPE) asdl_ ## NAME ## _seq *_Py_asdl_ ## NAME ## _seq_new(Py_ssize_t size, PyArena *arena) { asdl_ ## NAME ## _seq *seq = NULL; size_t n; if (size < 0 || (size && (((size_t)size - 1) > (SIZE_MAX / sizeof(void *))))) { PyErr_NoMemory(); return NULL; } n = (size ? (sizeof(TYPE *) * (size - 1)) : 0); if (n > SIZE_MAX - sizeof(asdl_ ## NAME ## _seq)) { PyErr_NoMemory(); return NULL; } n += sizeof(asdl_ ## NAME ## _seq); seq = (asdl_ ## NAME ## _seq *)PyArena_Malloc(arena, n); if (!seq) { PyErr_NoMemory(); return NULL; } memset(seq, 0, n); seq->size = size; seq->elements = (void**)seq->typed_elements; return seq; }
#define ISWHITESPACE(x) ((x) == ENDMARKER || (x) == NEWLINE || (x) == INDENT || (x) == DEDENT)
#define PyAnySet_Check(ob) (Py_IS_TYPE(ob, &PySet_Type) || Py_IS_TYPE(ob, &PyFrozenSet_Type) || PyType_IsSubtype(Py_TYPE(ob), &PySet_Type) || PyType_IsSubtype(Py_TYPE(ob), &PyFrozenSet_Type))
#define PyAnySet_CheckExact(ob) (Py_IS_TYPE(ob, &PySet_Type) || Py_IS_TYPE(ob, &PyFrozenSet_Type))
#define PyByteArray_AS_STRING(self) (assert(PyByteArray_Check(self)), Py_SIZE(self) ? ((PyByteArrayObject *)(self))->ob_start : _PyByteArray_empty_string)
#define PyByteArray_GET_SIZE(self) (assert(PyByteArray_Check(self)), Py_SIZE(self))
#define PyBytes_AS_STRING(op) (assert(PyBytes_Check(op)), (((PyBytesObject *)(op))->ob_sval))
#define PyBytes_GET_SIZE(op) (assert(PyBytes_Check(op)),Py_SIZE(op))
#define PyCFunction_GET_CLASS(func) (((PyCFunctionObject *)func) -> m_ml -> ml_flags & METH_METHOD ? ((PyCMethodObject *)func) -> mm_class : NULL)
#define PyCFunction_GET_SELF(func) (((PyCFunctionObject *)func) -> m_ml -> ml_flags & METH_STATIC ? NULL : ((PyCFunctionObject *)func) -> m_self)
#define PyDateTime_DATE_GET_MICROSECOND(o) ((((PyDateTime_DateTime*)o)->data[7] << 16) | (((PyDateTime_DateTime*)o)->data[8] << 8) | ((PyDateTime_DateTime*)o)->data[9])
#define PyDateTime_DATE_GET_TZINFO(o) (_PyDateTime_HAS_TZINFO(o) ? ((PyDateTime_DateTime *)(o))->tzinfo : Py_None)
#define PyDateTime_GET_YEAR(o) ((((PyDateTime_Date*)o)->data[0] << 8) | ((PyDateTime_Date*)o)->data[1])
#define PyDateTime_TIME_GET_MICROSECOND(o) ((((PyDateTime_Time*)o)->data[3] << 16) | (((PyDateTime_Time*)o)->data[4] << 8) | ((PyDateTime_Time*)o)->data[5])
#define PyDateTime_TIME_GET_TZINFO(o) (_PyDateTime_HAS_TZINFO(o) ? ((PyDateTime_Time *)(o))->tzinfo : Py_None)
#define PyDict_GET_SIZE(mp) (assert(PyDict_Check(mp)),((PyDictObject *)mp)->ma_used)
#define PyExceptionClass_Check(x) (PyType_Check((x)) && PyType_FastSubclass((PyTypeObject*)(x), Py_TPFLAGS_BASE_EXC_SUBCLASS))
#define PyFrozenSet_Check(ob) (Py_IS_TYPE(ob, &PyFrozenSet_Type) || PyType_IsSubtype(Py_TYPE(ob), &PyFrozenSet_Type))
#define PyHeapType_GET_MEMBERS(etype) ((PyMemberDef *)(((char *)etype) + Py_TYPE(etype)->tp_basicsize))
#define PyMem_New(type, n) ( ((size_t)(n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : ( (type *) PyMem_Malloc((n) * sizeof(type)) ) )
#define PyMem_Resize(p, type, n) ( (p) = ((size_t)(n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : (type *) PyMem_Realloc((p), (n) * sizeof(type)) )
#define PySequence_Fast_GET_ITEM(o, i) (PyList_Check(o) ? PyList_GET_ITEM(o, i) : PyTuple_GET_ITEM(o, i))
#define PySequence_Fast_GET_SIZE(o) (PyList_Check(o) ? PyList_GET_SIZE(o) : PyTuple_GET_SIZE(o))
#define PySequence_Fast_ITEMS(sf) (PyList_Check(sf) ? ((PyListObject *)(sf))->ob_item : ((PyTupleObject *)(sf))->ob_item)
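A minimal sketch of how the PyMem_New and PySequence_Fast_* macros above are typically combined; the helper name sum_sequence_longs and the scratch buffer are illustrative, not part of the listing.

#define PY_SSIZE_T_CLEAN
#include <Python.h>

static PyObject *
sum_sequence_longs(PyObject *iterable)
{
    /* PySequence_Fast guarantees a list or tuple, which is the precondition
     * for the PySequence_Fast_* macros. */
    PyObject *fast = PySequence_Fast(iterable, "expected an iterable of ints");
    if (fast == NULL) {
        return NULL;
    }
    Py_ssize_t n = PySequence_Fast_GET_SIZE(fast);
    long *buf = PyMem_New(long, n);   /* NULL on overflow or allocation failure */
    if (buf == NULL && n > 0) {
        Py_DECREF(fast);
        return PyErr_NoMemory();
    }
    long total = 0;
    for (Py_ssize_t i = 0; i < n; i++) {
        PyObject *item = PySequence_Fast_GET_ITEM(fast, i);  /* borrowed reference */
        buf[i] = PyLong_AsLong(item);
        if (buf[i] == -1 && PyErr_Occurred()) {
            PyMem_Free(buf);
            Py_DECREF(fast);
            return NULL;
        }
        total += buf[i];
    }
    PyMem_Free(buf);
    Py_DECREF(fast);
    return PyLong_FromLong(total);
}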
#define PySequence_ITEM(o, i) ( Py_TYPE(o)->tp_as_sequence->sq_item(o, i) )
#define PySet_Check(ob) (Py_IS_TYPE(ob, &PySet_Type) || PyType_IsSubtype(Py_TYPE(ob), &PySet_Type))
#define PySet_GET_SIZE(so) (assert(PyAnySet_Check(so)),(((PySetObject *)(so))->used))
#define PyUnicode_AS_UNICODE(op) (assert(PyUnicode_Check(op)), (((PyASCIIObject *)(op))->wstr) ? (((PyASCIIObject *)(op))->wstr) : PyUnicode_AsUnicode(_PyObject_CAST(op)))
#define PyUnicode_DATA(op) (assert(PyUnicode_Check(op)), PyUnicode_IS_COMPACT(op) ? _PyUnicode_COMPACT_DATA(op) : _PyUnicode_NONCOMPACT_DATA(op))
#define PyUnicode_GET_LENGTH(op) (assert(PyUnicode_Check(op)), assert(PyUnicode_IS_READY(op)), ((PyASCIIObject *)(op))->length)
#define PyUnicode_GET_SIZE(op) (assert(PyUnicode_Check(op)), (((PyASCIIObject *)(op))->wstr) ? PyUnicode_WSTR_LENGTH(op) : ((void)PyUnicode_AsUnicode(_PyObject_CAST(op)), assert(((PyASCIIObject *)(op))->wstr), PyUnicode_WSTR_LENGTH(op)))
#define PyUnicode_IS_ASCII(op) (assert(PyUnicode_Check(op)), assert(PyUnicode_IS_READY(op)), ((PyASCIIObject*)op)->state.ascii)
#define PyUnicode_IS_COMPACT_ASCII(op) (((PyASCIIObject*)op)->state.ascii && PyUnicode_IS_COMPACT(op))
#define PyUnicode_KIND(op) (assert(PyUnicode_Check(op)), assert(PyUnicode_IS_READY(op)), ((PyASCIIObject *)(op))->state.kind)
#define PyUnicode_MAX_CHAR_VALUE(op) (assert(PyUnicode_IS_READY(op)), (PyUnicode_IS_ASCII(op) ? (0x7f) : (PyUnicode_KIND(op) == PyUnicode_1BYTE_KIND ? (0xffU) : (PyUnicode_KIND(op) == PyUnicode_2BYTE_KIND ? (0xffffU) : (0x10ffffU)))))
#define PyUnicode_READ(kind, data, index) ((Py_UCS4) ((kind) == PyUnicode_1BYTE_KIND ? ((const Py_UCS1 *)(data))[(index)] : ((kind) == PyUnicode_2BYTE_KIND ? ((const Py_UCS2 *)(data))[(index)] : ((const Py_UCS4 *)(data))[(index)] ) ))
#define PyUnicode_READY(op) (assert(PyUnicode_Check(op)), (PyUnicode_IS_READY(op) ? 0 : _PyUnicode_Ready(_PyObject_CAST(op))))
#define PyUnicode_READ_CHAR(unicode, index) (assert(PyUnicode_Check(unicode)), assert(PyUnicode_IS_READY(unicode)), (Py_UCS4) (PyUnicode_KIND((unicode)) == PyUnicode_1BYTE_KIND ? ((const Py_UCS1 *)(PyUnicode_DATA((unicode))))[(index)] : (PyUnicode_KIND((unicode)) == PyUnicode_2BYTE_KIND ? ((const Py_UCS2 *)(PyUnicode_DATA((unicode))))[(index)] : ((const Py_UCS4 *)(PyUnicode_DATA((unicode))))[(index)] ) ))
#define PyUnicode_WRITE(kind, data, index, value) do { switch ((kind)) { case PyUnicode_1BYTE_KIND: { ((Py_UCS1 *)(data))[(index)] = (Py_UCS1)(value); break; } case PyUnicode_2BYTE_KIND: { ((Py_UCS2 *)(data))[(index)] = (Py_UCS2)(value); break; } default: { assert((kind) == PyUnicode_4BYTE_KIND); ((Py_UCS4 *)(data))[(index)] = (Py_UCS4)(value); } } } while (0)
#define PyWeakref_Check(op) (PyWeakref_CheckRef(op) || PyWeakref_CheckProxy(op))
#define PyWeakref_CheckProxy(op) (Py_IS_TYPE(op, &_PyWeakref_ProxyType) || Py_IS_TYPE(op, &_PyWeakref_CallableProxyType))
#define PyWeakref_GET_OBJECT(ref) (Py_REFCNT(((PyWeakReference *)(ref))->wr_object) > 0 ? ((PyWeakReference *)(ref))->wr_object : Py_None)
#define Py_ABS(x) ((x) < 0 ? -(x) : (x))
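A hedged sketch of the flexible-string-representation macros above in use (PyUnicode_READY, PyUnicode_KIND, PyUnicode_DATA, PyUnicode_GET_LENGTH, PyUnicode_READ); the helper count_code_point is hypothetical.

#define PY_SSIZE_T_CLEAN
#include <Python.h>
#include <assert.h>

static Py_ssize_t
count_code_point(PyObject *str, Py_UCS4 target)
{
    assert(PyUnicode_Check(str));
    if (PyUnicode_READY(str) < 0) {     /* ensure the canonical representation exists */
        return -1;
    }
    int kind = PyUnicode_KIND(str);     /* 1, 2 or 4 bytes per code point */
    const void *data = PyUnicode_DATA(str);
    Py_ssize_t len = PyUnicode_GET_LENGTH(str);
    Py_ssize_t count = 0;
    for (Py_ssize_t i = 0; i < len; i++) {
        /* PyUnicode_READ expands to a kind-dispatched array read */
        if (PyUnicode_READ(kind, data, i) == target) {
            count++;
        }
    }
    return count;
}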
#define Py_ARRAY_LENGTH(array) (sizeof(array) / sizeof((array)[0]) + Py_BUILD_ASSERT_EXPR(!__builtin_types_compatible_p(typeof(array), typeof(&(array)[0]))))
#define Py_ARRAY_LENGTH(array) (sizeof(array) / sizeof((array)[0]))
#define Py_CLEAR(op) do { PyObject *_py_tmp = _PyObject_CAST(op); if (_py_tmp != NULL) { (op) = NULL; Py_DECREF(_py_tmp); } } while (0)
#define Py_IS_FINITE(X) (!Py_IS_INFINITY(X) && !Py_IS_NAN(X))
#define Py_IS_NAN(X) ((X) != (X))
#define Py_MAX(x, y) (((x) > (y)) ? (x) : (y))
#define Py_MIN(x, y) (((x) > (y)) ? (y) : (x))
#define Py_OVERFLOWED(X) ((X) != 0.0 && (errno == ERANGE || (X) == Py_HUGE_VAL || (X) == -Py_HUGE_VAL))
#define Py_SAFE_DOWNCAST(VALUE, WIDE, NARROW) (assert((WIDE)(NARROW)(VALUE) == (VALUE)), (NARROW)(VALUE))
#define Py_SET_ERRNO_ON_MATH_ERROR(X) do { if (errno == 0) { if ((X) == Py_HUGE_VAL || (X) == -Py_HUGE_VAL) errno = ERANGE; else _Py_SET_EDOM_FOR_NAN(X) } } while(0)
#define Py_TRASHCAN_BEGIN(op, dealloc) Py_TRASHCAN_BEGIN_CONDITION(op, _PyTrash_cond(_PyObject_CAST(op), (destructor)dealloc))
#define Py_UNICODE_ISALNUM(ch) (Py_UNICODE_ISALPHA(ch) || Py_UNICODE_ISDECIMAL(ch) || Py_UNICODE_ISDIGIT(ch) || Py_UNICODE_ISNUMERIC(ch))
#define Py_UNICODE_ISSPACE(ch) ((Py_UCS4)(ch) < 128U ? _Py_ascii_whitespace[(ch)] : _PyUnicode_IsWhitespace(ch))
#define Py_UNICODE_IS_HIGH_SURROGATE(ch) (0xD800 <= (ch) && (ch) <= 0xDBFF)
#define Py_UNICODE_IS_LOW_SURROGATE(ch) (0xDC00 <= (ch) && (ch) <= 0xDFFF)
#define Py_UNICODE_IS_SURROGATE(ch) (0xD800 <= (ch) && (ch) <= 0xDFFF)
#define Py_VISIT(op) do { if (op) { int vret = visit(_PyObject_CAST(op), arg); if (vret) return vret; } } while (0)
#define _PyArg_UnpackKeywords(args, nargs, kwargs, kwnames, parser, minpos, maxpos, minkw, buf) (((minkw) == 0 && (kwargs) == NULL && (kwnames) == NULL && (minpos) <= (nargs) && (nargs) <= (maxpos) && args != NULL) ? (args) : _PyArg_UnpackKeywords((args), (nargs), (kwargs), (kwnames), (parser), (minpos), (maxpos), (minkw), (buf)))
#define _PyGCHead_SET_PREV(g, p) do { assert(((uintptr_t)p & ~_PyGC_PREV_MASK) == 0); (g)->_gc_prev = ((g)->_gc_prev & ~_PyGC_PREV_MASK) | ((uintptr_t)(p)); } while (0)
#define _PyGenObject_HEAD(prefix) PyObject_HEAD PyObject *prefix##_qualname; _PyErr_StackItem prefix##_exc_state;
#define _PyList_CAST(op) (assert(PyList_Check(op)), (PyListObject *)(op))
#define _PyObject_GC_MAY_BE_TRACKED(obj) (PyObject_IS_GC(obj) && (!PyTuple_CheckExact(obj) || _PyObject_GC_IS_TRACKED(obj)))
#define _PyObject_VAR_SIZE(typeobj, nitems) _Py_SIZE_ROUND_UP((typeobj)->tp_basicsize + (nitems)*(typeobj)->tp_itemsize, SIZEOF_VOID_P)
#define _PyTuple_CAST(op) (assert(PyTuple_Check(op)), (PyTupleObject *)(op))
#define _PyUnicodeWriter_Prepare(WRITER, LENGTH, MAXCHAR) (((MAXCHAR) <= (WRITER)->maxchar && (LENGTH) <= (WRITER)->size - (WRITER)->pos) ? 0 : (((LENGTH) == 0) ? 0 : _PyUnicodeWriter_PrepareInternal((WRITER), (LENGTH), (MAXCHAR))))
#define _PyUnicodeWriter_PrepareKind(WRITER, KIND) (assert((KIND) != PyUnicode_WCHAR_KIND), (KIND) <= (WRITER)->kind ? 0 : _PyUnicodeWriter_PrepareKindInternal((WRITER), (KIND)))
#define _PyUnicode_COMPACT_DATA(op) (PyUnicode_IS_ASCII(op) ? ((void*)((PyASCIIObject*)(op) + 1)) : ((void*)((PyCompactUnicodeObject*)(op) + 1)))
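The usual pairing of Py_VISIT in tp_traverse and Py_CLEAR in tp_clear, sketched with a hypothetical PairObject extension type; Py_VISIT assumes the enclosing traverse function's parameters are named visit and arg, as below.

#define PY_SSIZE_T_CLEAN
#include <Python.h>

typedef struct {
    PyObject_HEAD
    PyObject *first;    /* owned references that can participate in cycles */
    PyObject *second;
} PairObject;

static int
Pair_traverse(PairObject *self, visitproc visit, void *arg)
{
    Py_VISIT(self->first);   /* returns early if the visit callback reports failure */
    Py_VISIT(self->second);
    return 0;
}

static int
Pair_clear(PairObject *self)
{
    Py_CLEAR(self->first);   /* NULLs the field before dropping the reference */
    Py_CLEAR(self->second);
    return 0;
}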
#define _PyUnicode_NONCOMPACT_DATA(op) (assert(((PyUnicodeObject*)(op))->data.any), ((((PyUnicodeObject *)(op))->data.any)))
#define _Py_ANNOTATE_BENIGN_RACE(pointer, description) AnnotateBenignRaceSized(__FILE__, __LINE__, pointer, sizeof(*(pointer)), description)
#define _Py_ANNOTATE_BENIGN_RACE_STATIC(static_var, description) namespace { class static_var ## _annotator { public: static_var ## _annotator() { _Py_ANNOTATE_BENIGN_RACE_SIZED(&static_var, sizeof(static_var), # static_var ": " description); } }; static static_var ## _annotator the ## static_var ## _annotator; }
#define _Py_IDENTIFIER(varname) _Py_static_string(PyId_##varname, #varname)
#define _Py_InIntegralTypeRange(type, v) (_Py_IntegralTypeMin(type) <= v && v <= _Py_IntegralTypeMax(type))
#define _Py_IntegralTypeMax(type) ((_Py_IntegralTypeSigned(type)) ? (((((type)1 << (sizeof(type)*CHAR_BIT - 2)) - 1) << 1) + 1) : ~(type)0)
#define _Py_IntegralTypeMin(type) ((_Py_IntegralTypeSigned(type)) ? -_Py_IntegralTypeMax(type) - 1 : 0)
#define _Py_atomic_load_explicit(ATOMIC_VAL, ORDER) ( sizeof((ATOMIC_VAL)->_value) == 8 ? _Py_atomic_load_64bit((ATOMIC_VAL), (ORDER)) : _Py_atomic_load_32bit((ATOMIC_VAL), (ORDER)) )
#define _Py_atomic_load_explicit(ATOMIC_VAL, ORDER) ( sizeof((ATOMIC_VAL)->_value) == 8 ? _Py_atomic_load_64bit((ATOMIC_VAL), ORDER) : _Py_atomic_load_32bit((ATOMIC_VAL), ORDER) )
#define _Py_atomic_load_explicit(ATOMIC_VAL, ORDER) __extension__ ({ __typeof__(ATOMIC_VAL) atomic_val = ATOMIC_VAL; __typeof__(atomic_val->_value) result; volatile __typeof__(result) *volatile_data = &atomic_val->_value; _Py_memory_order order = ORDER; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); _Py_ANNOTATE_IGNORE_READS_BEGIN(); switch (order) { case _Py_memory_order_release: case _Py_memory_order_acq_rel: case _Py_memory_order_seq_cst: _Py_atomic_thread_fence(_Py_memory_order_release); break; default: break; } result = *volatile_data; switch (order) { case _Py_memory_order_acquire: case _Py_memory_order_acq_rel: case _Py_memory_order_seq_cst: _Py_atomic_signal_fence(_Py_memory_order_acquire); break; default: break; } _Py_ANNOTATE_IGNORE_READS_END(); result; })
#define _Py_atomic_store_explicit(ATOMIC_VAL, NEW_VAL, ORDER) __extension__ ({ __typeof__(ATOMIC_VAL) atomic_val = ATOMIC_VAL; __typeof__(atomic_val->_value) new_val = NEW_VAL; volatile __typeof__(new_val) *volatile_data = &atomic_val->_value; _Py_memory_order order = ORDER; _Py_ANNOTATE_MEMORY_ORDER(atomic_val, order); _Py_ANNOTATE_IGNORE_WRITES_BEGIN(); switch (order) { case _Py_memory_order_release: _Py_atomic_signal_fence(_Py_memory_order_release); /* fall through */ case _Py_memory_order_relaxed: *volatile_data = new_val; break; case _Py_memory_order_acquire: case _Py_memory_order_acq_rel: case _Py_memory_order_seq_cst: __asm__ volatile("xchg %0, %1" : "+r"(new_val) : "m"(atomic_val->_value) : "memory"); break; } _Py_ANNOTATE_IGNORE_WRITES_END(); })
#define _Py_atomic_store_explicit(ATOMIC_VAL, NEW_VAL, ORDER) if (sizeof((ATOMIC_VAL)->_value) == 8) { _Py_atomic_store_64bit((ATOMIC_VAL), (NEW_VAL), (ORDER)) } else { _Py_atomic_store_32bit((ATOMIC_VAL), (NEW_VAL), (ORDER)) }
#define _Py_atomic_store_explicit(ATOMIC_VAL, NEW_VAL, ORDER) if (sizeof((ATOMIC_VAL)->_value) == 8) { _Py_atomic_store_64bit((ATOMIC_VAL), NEW_VAL, ORDER) } else { _Py_atomic_store_32bit((ATOMIC_VAL), NEW_VAL, ORDER) }
#define asdl_seq_LEN(S) ((S) == NULL ? 0 : (S)->size)
#define asdl_seq_SET(S, I, V) do { Py_ssize_t _asdl_i = (I); assert((S) != NULL); assert(0 <= _asdl_i && _asdl_i < (S)->size); (S)->typed_elements[_asdl_i] = (V); } while (0)
#define asdl_seq_SET_UNTYPED(S, I, V) do { Py_ssize_t _asdl_i = (I); assert((S) != NULL); assert(0 <= _asdl_i && _asdl_i < (S)->size); (S)->elements[_asdl_i] = (V); } while (0)
#define foo_to_char(foo) ((char *)(foo) + Py_BUILD_ASSERT_EXPR(offsetof(struct foo, string) == 0))
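A standalone sketch of the range-check idiom behind _Py_IntegralTypeMax, _Py_IntegralTypeMin and _Py_InIntegralTypeRange, reimplemented under local names (TYPE_SIGNED, TYPE_MAX, TYPE_MIN) so it compiles without CPython headers; the signed maximum is built as 2^(N-1) - 1 in two steps to avoid shifting into the sign bit.

#include <assert.h>
#include <limits.h>
#include <stdio.h>

/* Local reimplementations for illustration only; not the CPython definitions. */
#define TYPE_SIGNED(type) (((type)-1) < (type)0)
#define TYPE_MAX(type) (TYPE_SIGNED(type) \
    ? (((((type)1 << (sizeof(type) * CHAR_BIT - 2)) - 1) << 1) + 1) \
    : (type)~(type)0)
#define TYPE_MIN(type) (TYPE_SIGNED(type) ? -TYPE_MAX(type) - 1 : (type)0)

int main(void)
{
    assert(TYPE_MAX(int) == INT_MAX);
    assert(TYPE_MIN(int) == INT_MIN);
    assert(TYPE_MAX(unsigned short) == USHRT_MAX);

    double v = 3e10;   /* out of range for a 32-bit int */
    int fits = (TYPE_MIN(int) <= v && v <= TYPE_MAX(int));
    printf("3e10 fits in int: %s\n", fits ? "yes" : "no");
    return 0;
}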