/* Execute compiled code */

/* XXX TO DO:
   XXX speed up searching for keywords by using a dictionary
   XXX document it!
   */

#define _PY_INTERPRETER

#include "Python.h"
#include "pycore_abstract.h"      // _PyIndex_Check()
#include "pycore_call.h"          // _PyObject_FastCallDictTstate()
#include "pycore_ceval.h"         // _PyEval_SignalAsyncExc()
#include "pycore_code.h"
#include "pycore_function.h"
#include "pycore_long.h"          // _PyLong_GetZero()
#include "pycore_object.h"        // _PyObject_GC_TRACK()
#include "pycore_moduleobject.h"  // PyModuleObject
#include "pycore_opcode.h"        // EXTRA_CASES
#include "pycore_pyerrors.h"      // _PyErr_Fetch()
#include "pycore_pymem.h"         // _PyMem_IsPtrFreed()
#include "pycore_pystate.h"       // _PyInterpreterState_GET()
#include "pycore_range.h"         // _PyRangeIterObject
#include "pycore_sliceobject.h"   // _PyBuildSlice_ConsumeRefs
#include "pycore_sysmodule.h"     // _PySys_Audit()
#include "pycore_tuple.h"         // _PyTuple_ITEMS()
#include "pycore_emscripten_signal.h"  // _Py_CHECK_EMSCRIPTEN_SIGNALS

#include "pycore_dict.h"
#include "dictobject.h"
#include "pycore_frame.h"
#include "opcode.h"
#include "pydtrace.h"
#include "setobject.h"
#include "structmember.h"         // struct PyMemberDef, T_OFFSET_EX

#include <ctype.h>
#include <stdbool.h>

#ifdef Py_DEBUG
   /* For debugging the interpreter: */
#  define LLTRACE  1      /* Low-level trace feature */
#endif

#if !defined(Py_BUILD_CORE)
#  error "ceval.c must be built with Py_BUILD_CORE defined for best performance"
#endif

#if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS)
// GH-89279: The MSVC compiler does not inline these static inline functions
// in PGO build in _PyEval_EvalFrameDefault(), because this function is over
// the limit of PGO, and that limit cannot be configured.
// Define them as macros to make sure that they are always inlined by the
// preprocessor.

#undef Py_DECREF
#define Py_DECREF(arg) \
    do { \
        _Py_DECREF_STAT_INC(); \
        PyObject *op = _PyObject_CAST(arg); \
        if (--op->ob_refcnt == 0) { \
            destructor dealloc = Py_TYPE(op)->tp_dealloc; \
            (*dealloc)(op); \
        } \
    } while (0)

#undef Py_XDECREF
#define Py_XDECREF(arg) \
    do { \
        PyObject *xop = _PyObject_CAST(arg); \
        if (xop != NULL) { \
            Py_DECREF(xop); \
        } \
    } while (0)

#undef Py_IS_TYPE
#define Py_IS_TYPE(ob, type) \
    (_PyObject_CAST(ob)->ob_type == (type))

#undef _Py_DECREF_SPECIALIZED
#define _Py_DECREF_SPECIALIZED(arg, dealloc) \
    do { \
        _Py_DECREF_STAT_INC(); \
        PyObject *op = _PyObject_CAST(arg); \
        if (--op->ob_refcnt == 0) { \
            destructor d = (destructor)(dealloc); \
            d(op); \
        } \
    } while (0)
#endif

// GH-89279: Similar to above, force inlining by using a macro.
#if defined(_MSC_VER) && SIZEOF_INT == 4
#define _Py_atomic_load_relaxed_int32(ATOMIC_VAL) (assert(sizeof((ATOMIC_VAL)->_value) == 4), *((volatile int*)&((ATOMIC_VAL)->_value)))
#else
#define _Py_atomic_load_relaxed_int32(ATOMIC_VAL) _Py_atomic_load_relaxed(ATOMIC_VAL)
#endif

#define HEAD_LOCK(runtime) \
    PyThread_acquire_lock((runtime)->interpreters.mutex, WAIT_LOCK)
#define HEAD_UNLOCK(runtime) \
    PyThread_release_lock((runtime)->interpreters.mutex)

/* Forward declarations */
static PyObject *trace_call_function(
    PyThreadState *tstate, PyObject *callable, PyObject **stack,
    Py_ssize_t oparg, PyObject *kwnames);
static PyObject * do_call_core(
    PyThreadState *tstate, PyObject *func,
    PyObject *callargs, PyObject *kwdict, int use_tracing);

#ifdef LLTRACE
static void
dump_stack(_PyInterpreterFrame *frame, PyObject **stack_pointer)
{
    PyObject **stack_base = _PyFrame_Stackbase(frame);
    PyObject *type, *value, *traceback;
    PyErr_Fetch(&type, &value, &traceback);
    printf("    stack=[");
    for (PyObject **ptr = stack_base; ptr < stack_pointer; ptr++) {
        if (ptr != stack_base) {
            printf(", ");
        }
        if (PyObject_Print(*ptr, stdout, 0) != 0) {
            PyErr_Clear();
            printf("<%s object at %p>",
                   Py_TYPE(*ptr)->tp_name, (void *)(*ptr));
        }
    }
    printf("]\n");
    fflush(stdout);
    PyErr_Restore(type, value, traceback);
}

static void
lltrace_instruction(_PyInterpreterFrame *frame,
                    PyObject **stack_pointer,
                    _Py_CODEUNIT *next_instr)
{
    dump_stack(frame, stack_pointer);
    int oparg = _Py_OPARG(*next_instr);
    int opcode = _Py_OPCODE(*next_instr);
    const char *opname = _PyOpcode_OpName[opcode];
    assert(opname != NULL);
    int offset = (int)(next_instr - _PyCode_CODE(frame->f_code));
    if (HAS_ARG((int)_PyOpcode_Deopt[opcode])) {
        printf("%d: %s %d\n", offset * 2, opname, oparg);
    }
    else {
        printf("%d: %s\n", offset * 2, opname);
    }
    fflush(stdout);
}
static void
lltrace_resume_frame(_PyInterpreterFrame *frame)
{
    PyObject *fobj = frame->f_funcobj;
    if (frame->owner == FRAME_OWNED_BY_CSTACK ||
        fobj == NULL ||
        !PyFunction_Check(fobj)
    ) {
        printf("\nResuming frame.");
        return;
    }
    PyFunctionObject *f = (PyFunctionObject *)fobj;
    PyObject *type, *value, *traceback;
    PyErr_Fetch(&type, &value, &traceback);
    PyObject *name = f->func_qualname;
    if (name == NULL) {
        name = f->func_name;
    }
    printf("\nResuming frame");
    if (name) {
        printf(" for ");
        if (PyObject_Print(name, stdout, 0) < 0) {
            PyErr_Clear();
        }
    }
    if (f->func_module) {
        printf(" in module ");
        if (PyObject_Print(f->func_module, stdout, 0) < 0) {
            PyErr_Clear();
        }
    }
    printf("\n");
    fflush(stdout);
    PyErr_Restore(type, value, traceback);
}
#endif
static int call_trace(Py_tracefunc, PyObject *,
                      PyThreadState *, _PyInterpreterFrame *,
                      int, PyObject *);
static int call_trace_protected(Py_tracefunc, PyObject *,
                                PyThreadState *, _PyInterpreterFrame *,
                                int, PyObject *);
static void call_exc_trace(Py_tracefunc, PyObject *,
                           PyThreadState *, _PyInterpreterFrame *);
static int maybe_call_line_trace(Py_tracefunc, PyObject *,
                                 PyThreadState *, _PyInterpreterFrame *, int);
static void maybe_dtrace_line(_PyInterpreterFrame *, PyTraceInfo *, int);
static void dtrace_function_entry(_PyInterpreterFrame *);
static void dtrace_function_return(_PyInterpreterFrame *);

static PyObject * import_name(PyThreadState *, _PyInterpreterFrame *,
                              PyObject *, PyObject *, PyObject *);
static PyObject * import_from(PyThreadState *, PyObject *, PyObject *);
static int import_all_from(PyThreadState *, PyObject *, PyObject *);
static void format_exc_check_arg(PyThreadState *, PyObject *, const char *, PyObject *);
static void format_exc_unbound(PyThreadState *tstate, PyCodeObject *co, int oparg);
static int check_args_iterable(PyThreadState *, PyObject *func, PyObject *vararg);
static int check_except_type_valid(PyThreadState *tstate, PyObject* right);
static int check_except_star_type_valid(PyThreadState *tstate, PyObject* right);
static void format_kwargs_error(PyThreadState *, PyObject *func, PyObject *kwargs);
static void format_awaitable_error(PyThreadState *, PyTypeObject *, int);
static int get_exception_handler(PyCodeObject *, int, int*, int*, int*);
static _PyInterpreterFrame *
_PyEvalFramePushAndInit(PyThreadState *tstate, PyFunctionObject *func,
                        PyObject *locals, PyObject* const* args,
                        size_t argcount, PyObject *kwnames);
static void
_PyEvalFrameClearAndPop(PyThreadState *tstate, _PyInterpreterFrame *frame);

#define NAME_ERROR_MSG \
    "name '%.200s' is not defined"
#define UNBOUNDLOCAL_ERROR_MSG \
    "cannot access local variable '%s' where it is not associated with a value"
#define UNBOUNDFREE_ERROR_MSG \
    "cannot access free variable '%s' where it is not associated with a" \
    " value in enclosing scope"

#ifndef NDEBUG
/* Ensure that tstate is valid: sanity check for PyEval_AcquireThread() and
   PyEval_RestoreThread(). Detect if tstate memory was freed. It can happen
   when a thread continues to run after Python finalization, especially
   daemon threads. */
static int
is_tstate_valid(PyThreadState *tstate)
{
    assert(!_PyMem_IsPtrFreed(tstate));
    assert(!_PyMem_IsPtrFreed(tstate->interp));
    return 1;
}
#endif


#ifdef HAVE_ERRNO_H
#include <errno.h>
#endif

int
Py_GetRecursionLimit(void)
{
    PyInterpreterState *interp = _PyInterpreterState_GET();
    return interp->ceval.recursion_limit;
}

void
Py_SetRecursionLimit(int new_limit)
{
    PyInterpreterState *interp = _PyInterpreterState_GET();
    interp->ceval.recursion_limit = new_limit;
    for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) {
        int depth = p->py_recursion_limit - p->py_recursion_remaining;
        p->py_recursion_limit = new_limit;
        p->py_recursion_remaining = new_limit - depth;
    }
}
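
/* Worked example for the loop above (illustrative numbers only): a thread
   that is 10 calls deep with py_recursion_limit == 1000 has
   py_recursion_remaining == 990; after Py_SetRecursionLimit(2000) its depth
   is still 10, so py_recursion_remaining becomes 2000 - 10 == 1990. */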

/* The function _Py_EnterRecursiveCallTstate() only calls _Py_CheckRecursiveCall()
   if the recursion_depth reaches recursion_limit. */
int
_Py_CheckRecursiveCall(PyThreadState *tstate, const char *where)
{
#ifdef USE_STACKCHECK
    if (PyOS_CheckStack()) {
        ++tstate->c_recursion_remaining;
        _PyErr_SetString(tstate, PyExc_MemoryError, "Stack overflow");
        return -1;
    }
#endif
    if (tstate->recursion_headroom) {
        if (tstate->c_recursion_remaining < -50) {
            /* Overflowing while handling an overflow. Give up. */
            Py_FatalError("Cannot recover from stack overflow.");
        }
    }
    else {
        if (tstate->c_recursion_remaining <= 0) {
            tstate->recursion_headroom++;
            _PyErr_Format(tstate, PyExc_RecursionError,
                        "maximum recursion depth exceeded%s",
                        where);
            tstate->recursion_headroom--;
            ++tstate->c_recursion_remaining;
            return -1;
        }
    }
    return 0;
}


static const binaryfunc binary_ops[] = {
    [NB_ADD] = PyNumber_Add,
    [NB_AND] = PyNumber_And,
    [NB_FLOOR_DIVIDE] = PyNumber_FloorDivide,
    [NB_LSHIFT] = PyNumber_Lshift,
    [NB_MATRIX_MULTIPLY] = PyNumber_MatrixMultiply,
    [NB_MULTIPLY] = PyNumber_Multiply,
    [NB_REMAINDER] = PyNumber_Remainder,
    [NB_OR] = PyNumber_Or,
    [NB_POWER] = _PyNumber_PowerNoMod,
    [NB_RSHIFT] = PyNumber_Rshift,
    [NB_SUBTRACT] = PyNumber_Subtract,
    [NB_TRUE_DIVIDE] = PyNumber_TrueDivide,
    [NB_XOR] = PyNumber_Xor,
    [NB_INPLACE_ADD] = PyNumber_InPlaceAdd,
    [NB_INPLACE_AND] = PyNumber_InPlaceAnd,
    [NB_INPLACE_FLOOR_DIVIDE] = PyNumber_InPlaceFloorDivide,
    [NB_INPLACE_LSHIFT] = PyNumber_InPlaceLshift,
    [NB_INPLACE_MATRIX_MULTIPLY] = PyNumber_InPlaceMatrixMultiply,
    [NB_INPLACE_MULTIPLY] = PyNumber_InPlaceMultiply,
    [NB_INPLACE_REMAINDER] = PyNumber_InPlaceRemainder,
    [NB_INPLACE_OR] = PyNumber_InPlaceOr,
    [NB_INPLACE_POWER] = _PyNumber_InPlacePowerNoMod,
    [NB_INPLACE_RSHIFT] = PyNumber_InPlaceRshift,
    [NB_INPLACE_SUBTRACT] = PyNumber_InPlaceSubtract,
    [NB_INPLACE_TRUE_DIVIDE] = PyNumber_InPlaceTrueDivide,
    [NB_INPLACE_XOR] = PyNumber_InPlaceXor,
};
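
/* Illustrative use of the table above (a sketch, not the exact handler
   code): the generic BINARY_OP instruction indexes binary_ops with its
   oparg, roughly

       assert((unsigned)oparg < Py_ARRAY_LENGTH(binary_ops));
       PyObject *res = binary_ops[oparg](lhs, rhs);

   while the specialized forms (e.g. BINARY_OP_ADD_INT) bypass the table. */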


// PEP 634: Structural Pattern Matching


// Return a tuple of values corresponding to keys, with error checks for
// duplicate/missing keys.
static PyObject*
match_keys(PyThreadState *tstate, PyObject *map, PyObject *keys)
{
    assert(PyTuple_CheckExact(keys));
    Py_ssize_t nkeys = PyTuple_GET_SIZE(keys);
    if (!nkeys) {
        // No keys means no items.
        return PyTuple_New(0);
    }
    PyObject *seen = NULL;
    PyObject *dummy = NULL;
    PyObject *values = NULL;
    PyObject *get = NULL;
    // We use the two argument form of map.get(key, default) for two reasons:
    // - Atomically check for a key and get its value without error handling.
    // - Don't cause key creation or resizing in dict subclasses like
    //   collections.defaultdict that define __missing__ (or similar).
    int meth_found = _PyObject_GetMethod(map, &_Py_ID(get), &get);
    if (get == NULL) {
        goto fail;
    }
    seen = PySet_New(NULL);
    if (seen == NULL) {
        goto fail;
    }
    // dummy = object()
    dummy = _PyObject_CallNoArgs((PyObject *)&PyBaseObject_Type);
    if (dummy == NULL) {
        goto fail;
    }
    values = PyTuple_New(nkeys);
    if (values == NULL) {
        goto fail;
    }
    for (Py_ssize_t i = 0; i < nkeys; i++) {
        PyObject *key = PyTuple_GET_ITEM(keys, i);
        if (PySet_Contains(seen, key) || PySet_Add(seen, key)) {
            if (!_PyErr_Occurred(tstate)) {
                // Seen it before!
                _PyErr_Format(tstate, PyExc_ValueError,
                              "mapping pattern checks duplicate key (%R)", key);
            }
            goto fail;
        }
        PyObject *args[] = { map, key, dummy };
        PyObject *value = NULL;
        if (meth_found) {
            value = PyObject_Vectorcall(get, args, 3, NULL);
        }
        else {
            value = PyObject_Vectorcall(get, &args[1], 2, NULL);
        }
        if (value == NULL) {
            goto fail;
        }
        if (value == dummy) {
            // key not in map!
            Py_DECREF(value);
            Py_DECREF(values);
            // Return None:
            values = Py_NewRef(Py_None);
            goto done;
        }
        PyTuple_SET_ITEM(values, i, value);
    }
    // Success:
done:
    Py_DECREF(get);
    Py_DECREF(seen);
    Py_DECREF(dummy);
    return values;
fail:
    Py_XDECREF(get);
    Py_XDECREF(seen);
    Py_XDECREF(dummy);
    Py_XDECREF(values);
    return NULL;
}

// Extract a named attribute from the subject, with additional bookkeeping to
// raise TypeErrors for repeated lookups. On failure, return NULL (with no
// error set). Use _PyErr_Occurred(tstate) to disambiguate.
static PyObject*
match_class_attr(PyThreadState *tstate, PyObject *subject, PyObject *type,
                 PyObject *name, PyObject *seen)
{
    assert(PyUnicode_CheckExact(name));
    assert(PySet_CheckExact(seen));
    if (PySet_Contains(seen, name) || PySet_Add(seen, name)) {
        if (!_PyErr_Occurred(tstate)) {
            // Seen it before!
            _PyErr_Format(tstate, PyExc_TypeError,
                          "%s() got multiple sub-patterns for attribute %R",
                          ((PyTypeObject*)type)->tp_name, name);
        }
        return NULL;
    }
    PyObject *attr = PyObject_GetAttr(subject, name);
    if (attr == NULL && _PyErr_ExceptionMatches(tstate, PyExc_AttributeError)) {
        _PyErr_Clear(tstate);
    }
    return attr;
}

// On success (match), return a tuple of extracted attributes. On failure (no
// match), return NULL. Use _PyErr_Occurred(tstate) to disambiguate.
static PyObject*
match_class(PyThreadState *tstate, PyObject *subject, PyObject *type,
            Py_ssize_t nargs, PyObject *kwargs)
{
    if (!PyType_Check(type)) {
        const char *e = "called match pattern must be a type";
        _PyErr_Format(tstate, PyExc_TypeError, e);
        return NULL;
    }
    assert(PyTuple_CheckExact(kwargs));
    // First, an isinstance check:
    if (PyObject_IsInstance(subject, type) <= 0) {
        return NULL;
    }
    // So far so good:
    PyObject *seen = PySet_New(NULL);
    if (seen == NULL) {
        return NULL;
    }
    PyObject *attrs = PyList_New(0);
    if (attrs == NULL) {
        Py_DECREF(seen);
        return NULL;
    }
    // NOTE: From this point on, goto fail on failure:
    PyObject *match_args = NULL;
    // First, the positional subpatterns:
    if (nargs) {
        int match_self = 0;
        match_args = PyObject_GetAttrString(type, "__match_args__");
        if (match_args) {
            if (!PyTuple_CheckExact(match_args)) {
                const char *e = "%s.__match_args__ must be a tuple (got %s)";
                _PyErr_Format(tstate, PyExc_TypeError, e,
                              ((PyTypeObject *)type)->tp_name,
                              Py_TYPE(match_args)->tp_name);
                goto fail;
            }
        }
        else if (_PyErr_ExceptionMatches(tstate, PyExc_AttributeError)) {
            _PyErr_Clear(tstate);
            // _Py_TPFLAGS_MATCH_SELF is only acknowledged if the type does not
            // define __match_args__. This is natural behavior for subclasses:
            // it's as if __match_args__ is some "magic" value that is lost as
            // soon as they redefine it.
            match_args = PyTuple_New(0);
            match_self = PyType_HasFeature((PyTypeObject*)type,
                                            _Py_TPFLAGS_MATCH_SELF);
        }
        else {
            goto fail;
        }
        assert(PyTuple_CheckExact(match_args));
        Py_ssize_t allowed = match_self ? 1 : PyTuple_GET_SIZE(match_args);
        if (allowed < nargs) {
            const char *plural = (allowed == 1) ? "" : "s";
            _PyErr_Format(tstate, PyExc_TypeError,
                          "%s() accepts %d positional sub-pattern%s (%d given)",
                          ((PyTypeObject*)type)->tp_name,
                          allowed, plural, nargs);
            goto fail;
        }
        if (match_self) {
            // Easy. Copy the subject itself, and move on to kwargs.
            PyList_Append(attrs, subject);
        }
        else {
            for (Py_ssize_t i = 0; i < nargs; i++) {
                PyObject *name = PyTuple_GET_ITEM(match_args, i);
                if (!PyUnicode_CheckExact(name)) {
                    _PyErr_Format(tstate, PyExc_TypeError,
                                  "__match_args__ elements must be strings "
                                  "(got %s)", Py_TYPE(name)->tp_name);
                    goto fail;
                }
                PyObject *attr = match_class_attr(tstate, subject, type, name,
                                                  seen);
                if (attr == NULL) {
                    goto fail;
                }
                PyList_Append(attrs, attr);
                Py_DECREF(attr);
            }
        }
        Py_CLEAR(match_args);
    }
    // Finally, the keyword subpatterns:
    for (Py_ssize_t i = 0; i < PyTuple_GET_SIZE(kwargs); i++) {
        PyObject *name = PyTuple_GET_ITEM(kwargs, i);
        PyObject *attr = match_class_attr(tstate, subject, type, name, seen);
        if (attr == NULL) {
            goto fail;
        }
        PyList_Append(attrs, attr);
        Py_DECREF(attr);
    }
    Py_SETREF(attrs, PyList_AsTuple(attrs));
    Py_DECREF(seen);
    return attrs;
fail:
    // We really don't care whether an error was raised or not... that's our
    // caller's problem. All we know is that the match failed.
    Py_XDECREF(match_args);
    Py_DECREF(seen);
    Py_DECREF(attrs);
    return NULL;
}


static int do_raise(PyThreadState *tstate, PyObject *exc, PyObject *cause);
static int exception_group_match(
    PyObject* exc_value, PyObject *match_type,
    PyObject **match, PyObject **rest);

static int unpack_iterable(PyThreadState *, PyObject *, int, int, PyObject **);

PyObject *
PyEval_EvalCode(PyObject *co, PyObject *globals, PyObject *locals)
{
    PyThreadState *tstate = _PyThreadState_GET();
    if (locals == NULL) {
        locals = globals;
    }
    PyObject *builtins = _PyEval_BuiltinsFromGlobals(tstate, globals); // borrowed ref
    if (builtins == NULL) {
        return NULL;
    }
    PyFrameConstructor desc = {
        .fc_globals = globals,
        .fc_builtins = builtins,
        .fc_name = ((PyCodeObject *)co)->co_name,
        .fc_qualname = ((PyCodeObject *)co)->co_name,
        .fc_code = co,
        .fc_defaults = NULL,
        .fc_kwdefaults = NULL,
        .fc_closure = NULL
    };
    PyFunctionObject *func = _PyFunction_FromConstructor(&desc);
    if (func == NULL) {
        return NULL;
    }
    EVAL_CALL_STAT_INC(EVAL_CALL_LEGACY);
    PyObject *res = _PyEval_Vector(tstate, func, locals, NULL, 0, NULL);
    Py_DECREF(func);
    return res;
}


/* Interpreter main loop */

PyObject *
PyEval_EvalFrame(PyFrameObject *f)
{
    /* Function kept for backward compatibility */
    PyThreadState *tstate = _PyThreadState_GET();
    return _PyEval_EvalFrame(tstate, f->f_frame, 0);
}

PyObject *
PyEval_EvalFrameEx(PyFrameObject *f, int throwflag)
{
    PyThreadState *tstate = _PyThreadState_GET();
    return _PyEval_EvalFrame(tstate, f->f_frame, throwflag);
}


/* Computed GOTOs, or
       the-optimization-commonly-but-improperly-known-as-"threaded code"
   using gcc's labels-as-values extension
   (http://gcc.gnu.org/onlinedocs/gcc/Labels-as-Values.html).

   The traditional bytecode evaluation loop uses a "switch" statement, which
   decent compilers will optimize as a single indirect branch instruction
   combined with a lookup table of jump addresses. However, since the
   indirect jump instruction is shared by all opcodes, the CPU will have a
   hard time making the right prediction for where to jump next (actually,
   it will be always wrong except in the uncommon case of a sequence of
   several identical opcodes).

   "Threaded code" in contrast, uses an explicit jump table and an explicit
   indirect jump instruction at the end of each opcode. Since the jump
   instruction is at a different address for each opcode, the CPU will make a
   separate prediction for each of these instructions, which is equivalent to
   predicting the second opcode of each opcode pair. These predictions have
   a much better chance to turn out valid, especially in small bytecode loops.

   A mispredicted branch on a modern CPU flushes the whole pipeline and
   can cost several CPU cycles (depending on the pipeline depth),
   and potentially many more instructions (depending on the pipeline width).
   A correctly predicted branch, however, is nearly free.

   At the time of this writing, the "threaded code" version is up to 15-20%
   faster than the normal "switch" version, depending on the compiler and the
   CPU architecture.

   NOTE: care must be taken that the compiler doesn't try to "optimize" the
   indirect jumps by sharing them between all opcodes. Such optimizations
   can be disabled on gcc by using the -fno-gcse flag (or possibly
   -fno-crossjumping).
*/
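
/* A minimal sketch of the two dispatch styles described above (illustration
   only; the real dispatch is assembled from the TARGET() and DISPATCH_GOTO()
   macros defined below):

       // switch dispatch: every handler returns to one shared indirect jump
       dispatch_opcode:
           switch (opcode) {
           case LOAD_FAST: ...; goto dispatch_opcode;
           }

       // computed gotos: each handler ends with its own indirect jump
       static void *opcode_targets[256] = { &&TARGET_LOAD_FAST, ... };
       TARGET_LOAD_FAST: ...; goto *opcode_targets[opcode];
*/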

/* Use macros rather than inline functions, to make it as clear as possible
 * to the C compiler that the tracing check is a simple test then branch.
 * We want to be sure that the compiler knows this before it generates
 * the CFG.
 */

#ifdef WITH_DTRACE
#define OR_DTRACE_LINE | (PyDTrace_LINE_ENABLED() ? 255 : 0)
#else
#define OR_DTRACE_LINE
#endif

#ifdef HAVE_COMPUTED_GOTOS
    #ifndef USE_COMPUTED_GOTOS
    #define USE_COMPUTED_GOTOS 1
    #endif
#else
    #if defined(USE_COMPUTED_GOTOS) && USE_COMPUTED_GOTOS
    #error "Computed gotos are not supported on this compiler."
    #endif
    #undef USE_COMPUTED_GOTOS
    #define USE_COMPUTED_GOTOS 0
#endif

#ifdef Py_STATS
#define INSTRUCTION_START(op) \
    do { \
        frame->prev_instr = next_instr++; \
        OPCODE_EXE_INC(op); \
        if (_py_stats) _py_stats->opcode_stats[lastopcode].pair_count[op]++; \
        lastopcode = op; \
    } while (0)
#else
#define INSTRUCTION_START(op) (frame->prev_instr = next_instr++)
#endif

#if USE_COMPUTED_GOTOS
#define TARGET(op) TARGET_##op: INSTRUCTION_START(op);
#define DISPATCH_GOTO() goto *opcode_targets[opcode]
#else
#define TARGET(op) case op: INSTRUCTION_START(op);
#define DISPATCH_GOTO() goto dispatch_opcode
#endif

/* PRE_DISPATCH_GOTO() does lltrace if enabled. Normally a no-op */
#ifdef LLTRACE
#define PRE_DISPATCH_GOTO() if (lltrace) { \
    lltrace_instruction(frame, stack_pointer, next_instr); }
#else
#define PRE_DISPATCH_GOTO() ((void)0)
#endif


/* Do interpreter dispatch accounting for tracing and instrumentation */
#define DISPATCH() \
    { \
        NEXTOPARG(); \
        PRE_DISPATCH_GOTO(); \
        assert(cframe.use_tracing == 0 || cframe.use_tracing == 255); \
        opcode |= cframe.use_tracing OR_DTRACE_LINE; \
        DISPATCH_GOTO(); \
    }

#define DISPATCH_SAME_OPARG() \
    { \
        opcode = _Py_OPCODE(*next_instr); \
        PRE_DISPATCH_GOTO(); \
        opcode |= cframe.use_tracing OR_DTRACE_LINE; \
        DISPATCH_GOTO(); \
    }

#define CHECK_EVAL_BREAKER() \
    _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); \
    if (_Py_atomic_load_relaxed_int32(eval_breaker)) { \
        goto handle_eval_breaker; \
    }


/* Tuple access macros */

#ifndef Py_DEBUG
#define GETITEM(v, i) PyTuple_GET_ITEM((v), (i))
#else
static inline PyObject *
GETITEM(PyObject *v, Py_ssize_t i) {
    assert(PyTuple_Check(v));
    assert(i >= 0);
    assert(i < PyTuple_GET_SIZE(v));
    return PyTuple_GET_ITEM(v, i);
}
#endif

/* Code access macros */

/* The integer overflow is checked by an assertion below. */
#define INSTR_OFFSET() ((int)(next_instr - _PyCode_CODE(frame->f_code)))
#define NEXTOPARG()  do { \
        _Py_CODEUNIT word = *next_instr; \
        opcode = _Py_OPCODE(word); \
        oparg = _Py_OPARG(word); \
    } while (0)
#define JUMPTO(x)       (next_instr = _PyCode_CODE(frame->f_code) + (x))
#define JUMPBY(x)       (next_instr += (x))

/* OpCode prediction macros
    Some opcodes tend to come in pairs thus making it possible to
    predict the second code when the first is run.  For example,
    COMPARE_OP is often followed by POP_JUMP_IF_FALSE or POP_JUMP_IF_TRUE.

    Verifying the prediction costs a single high-speed test of a register
    variable against a constant.  If the pairing was good, then the
    processor's own internal branch prediction has a high likelihood of
    success, resulting in a nearly zero-overhead transition to the
    next opcode.  A successful prediction saves a trip through the eval-loop
    including its unpredictable switch-case branch.  Combined with the
    processor's internal branch prediction, a successful PREDICT has the
    effect of making the two opcodes run as if they were a single new opcode
    with the bodies combined.

    If collecting opcode statistics, your choices are to either keep the
    predictions turned-on and interpret the results as if some opcodes
    had been combined or turn-off predictions so that the opcode frequency
    counter updates for both opcodes.

    Opcode prediction is disabled with threaded code, since the latter allows
    the CPU to record separate branch prediction information for each
    opcode.

*/
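
/* Minimal usage sketch (illustrative; the real pairs live in the opcode
   handlers): an instruction that is usually followed by POP_JUMP_IF_FALSE
   ends its handler with

       PREDICT(POP_JUMP_IF_FALSE);
       DISPATCH();

   and the POP_JUMP_IF_FALSE handler marks its body with
   PREDICTED(POP_JUMP_IF_FALSE), so a successful prediction jumps straight
   to that label and skips the main dispatch. */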

#define PREDICT_ID(op)          PRED_##op

#if USE_COMPUTED_GOTOS
#define PREDICT(op)             if (0) goto PREDICT_ID(op)
#else
#define PREDICT(op) \
    do { \
        _Py_CODEUNIT word = *next_instr; \
        opcode = _Py_OPCODE(word) | cframe.use_tracing OR_DTRACE_LINE; \
        if (opcode == op) { \
            oparg = _Py_OPARG(word); \
            INSTRUCTION_START(op); \
            goto PREDICT_ID(op); \
        } \
    } while(0)
#endif
#define PREDICTED(op)           PREDICT_ID(op):


/* Stack manipulation macros */

/* The stack can grow at most MAXINT deep, as co_nlocals and
   co_stacksize are ints. */
#define STACK_LEVEL()     ((int)(stack_pointer - _PyFrame_Stackbase(frame)))
#define STACK_SIZE()      (frame->f_code->co_stacksize)
#define EMPTY()           (STACK_LEVEL() == 0)
#define TOP()             (stack_pointer[-1])
#define SECOND()          (stack_pointer[-2])
#define THIRD()           (stack_pointer[-3])
#define FOURTH()          (stack_pointer[-4])
#define PEEK(n)           (stack_pointer[-(n)])
#define POKE(n, v)        (stack_pointer[-(n)] = (v))
#define SET_TOP(v)        (stack_pointer[-1] = (v))
#define SET_SECOND(v)     (stack_pointer[-2] = (v))
#define BASIC_STACKADJ(n) (stack_pointer += n)
#define BASIC_PUSH(v)     (*stack_pointer++ = (v))
#define BASIC_POP()       (*--stack_pointer)

#ifdef Py_DEBUG
#define PUSH(v)         do { \
                            BASIC_PUSH(v); \
                            assert(STACK_LEVEL() <= STACK_SIZE()); \
                        } while (0)
#define POP()           (assert(STACK_LEVEL() > 0), BASIC_POP())
#define STACK_GROW(n)   do { \
                            assert(n >= 0); \
                            BASIC_STACKADJ(n); \
                            assert(STACK_LEVEL() <= STACK_SIZE()); \
                        } while (0)
#define STACK_SHRINK(n) do { \
                            assert(n >= 0); \
                            assert(STACK_LEVEL() >= n); \
                            BASIC_STACKADJ(-(n)); \
                        } while (0)
#else
#define PUSH(v)                BASIC_PUSH(v)
#define POP()                  BASIC_POP()
#define STACK_GROW(n)          BASIC_STACKADJ(n)
#define STACK_SHRINK(n)        BASIC_STACKADJ(-(n))
#endif

/* Local variable macros */

#define GETLOCAL(i)     (frame->localsplus[i])

/* The SETLOCAL() macro must not DECREF the local variable in-place and
   then store the new value; it must copy the old value to a temporary
   value, then store the new value, and then DECREF the temporary value.
   This is because it is possible that during the DECREF the frame is
   accessed by other code (e.g. a __del__ method or gc.collect()) and the
   variable would be pointing to already-freed memory. */
#define SETLOCAL(i, value)      do { PyObject *tmp = GETLOCAL(i); \
                                     GETLOCAL(i) = value; \
                                     Py_XDECREF(tmp); } while (0)
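
/* Illustrative sketch of why SETLOCAL() is ordered this way (assume the
   slot holds the only reference to an object with a __del__ method):

       // unsafe: __del__ can run while GETLOCAL(i) still points at the
       // object being destroyed
       Py_XDECREF(GETLOCAL(i));
       GETLOCAL(i) = value;

       // safe (what SETLOCAL does): publish the new value first, then
       // release the old reference
       PyObject *tmp = GETLOCAL(i);
       GETLOCAL(i) = value;
       Py_XDECREF(tmp);
*/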

#define GO_TO_INSTRUCTION(op) goto PREDICT_ID(op)

#ifdef Py_STATS
#define UPDATE_MISS_STATS(INSTNAME)                              \
    do {                                                         \
        STAT_INC(opcode, miss);                                  \
        STAT_INC((INSTNAME), miss);                              \
        /* The counter is always the first cache entry: */       \
        if (ADAPTIVE_COUNTER_IS_ZERO(*next_instr)) {             \
            STAT_INC((INSTNAME), deopt);                         \
        }                                                        \
        else {                                                   \
            /* This is about to be (incorrectly) incremented: */ \
            STAT_DEC((INSTNAME), deferred);                      \
        }                                                        \
    } while (0)
#else
#define UPDATE_MISS_STATS(INSTNAME) ((void)0)
#endif

#define DEOPT_IF(COND, INSTNAME)                            \
    if ((COND)) {                                           \
        /* This is only a single jump on release builds! */ \
        UPDATE_MISS_STATS((INSTNAME));                      \
        assert(_PyOpcode_Deopt[opcode] == (INSTNAME));      \
        GO_TO_INSTRUCTION(INSTNAME);                        \
    }
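
/* Illustrative use of DEOPT_IF() (a sketch; the real call sites are in the
   generated opcode handlers): a specialized instruction such as
   BINARY_OP_ADD_INT guards its assumptions with

       DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP);
       DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP);

   so that a failed guard jumps back to the generic BINARY_OP body via
   GO_TO_INSTRUCTION(). */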


#define GLOBALS() frame->f_globals
#define BUILTINS() frame->f_builtins
#define LOCALS() frame->f_locals

/* Shared opcode macros */

#define TRACE_FUNCTION_EXIT() \
    if (cframe.use_tracing) { \
        if (trace_function_exit(tstate, frame, retval)) { \
            Py_DECREF(retval); \
            goto exit_unwind; \
        } \
    }

#define DTRACE_FUNCTION_EXIT() \
    if (PyDTrace_FUNCTION_RETURN_ENABLED()) { \
        dtrace_function_return(frame); \
    }

#define TRACE_FUNCTION_UNWIND()  \
    if (cframe.use_tracing) { \
        /* Since we are already unwinding, \
         * we don't care if this raises */ \
        trace_function_exit(tstate, frame, NULL); \
    }

#define TRACE_FUNCTION_ENTRY() \
    if (cframe.use_tracing) { \
        _PyFrame_SetStackPointer(frame, stack_pointer); \
        int err = trace_function_entry(tstate, frame); \
        stack_pointer = _PyFrame_GetStackPointer(frame); \
        if (err) { \
            goto error; \
        } \
    }

#define TRACE_FUNCTION_THROW_ENTRY() \
    if (cframe.use_tracing) { \
        assert(frame->stacktop >= 0); \
        if (trace_function_entry(tstate, frame)) { \
            goto exit_unwind; \
        } \
    }

#define DTRACE_FUNCTION_ENTRY()  \
    if (PyDTrace_FUNCTION_ENTRY_ENABLED()) { \
        dtrace_function_entry(frame); \
    }

#define ADAPTIVE_COUNTER_IS_ZERO(COUNTER) \
    (((COUNTER) >> ADAPTIVE_BACKOFF_BITS) == 0)

#define ADAPTIVE_COUNTER_IS_MAX(COUNTER) \
    (((COUNTER) >> ADAPTIVE_BACKOFF_BITS) == ((1 << MAX_BACKOFF_VALUE) - 1))

#define DECREMENT_ADAPTIVE_COUNTER(COUNTER)           \
    do {                                              \
        assert(!ADAPTIVE_COUNTER_IS_ZERO((COUNTER))); \
        (COUNTER) -= (1 << ADAPTIVE_BACKOFF_BITS);    \
    } while (0);

#define INCREMENT_ADAPTIVE_COUNTER(COUNTER)          \
    do {                                             \
        assert(!ADAPTIVE_COUNTER_IS_MAX((COUNTER))); \
        (COUNTER) += (1 << ADAPTIVE_BACKOFF_BITS);   \
    } while (0);
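
/* Sketch of the counter layout assumed by the four macros above: the
   countdown lives in the bits above ADAPTIVE_BACKOFF_BITS, while the low
   bits (the backoff field, managed by the specializer) are left untouched,
   so adding or subtracting (1 << ADAPTIVE_BACKOFF_BITS) adjusts only the
   countdown:

       counter == (countdown << ADAPTIVE_BACKOFF_BITS) | backoff;
       DECREMENT_ADAPTIVE_COUNTER(counter);   // countdown -= 1
*/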

static int
trace_function_entry(PyThreadState *tstate, _PyInterpreterFrame *frame)
{
    if (tstate->c_tracefunc != NULL) {
        /* tstate->c_tracefunc, if defined, is a
            function that will be called on *every* entry
            to a code block.  Its return value, if not
            None, is a function that will be called at
            the start of each executed line of code.
            (Actually, the function must return itself
            in order to continue tracing.)  The trace
            functions are called with three arguments:
            a pointer to the current frame, a string
            indicating why the function is called, and
            an argument which depends on the situation.
            The global trace function is also called
            whenever an exception is detected. */
        if (call_trace_protected(tstate->c_tracefunc,
                                    tstate->c_traceobj,
                                    tstate, frame,
                                    PyTrace_CALL, Py_None)) {
            /* Trace function raised an error */
            return -1;
        }
    }
    if (tstate->c_profilefunc != NULL) {
        /* Similar for c_profilefunc, except it needn't
            return itself and isn't called for "line" events */
        if (call_trace_protected(tstate->c_profilefunc,
                                    tstate->c_profileobj,
                                    tstate, frame,
                                    PyTrace_CALL, Py_None)) {
            /* Profile function raised an error */
            return -1;
        }
    }
    return 0;
}

static int
trace_function_exit(PyThreadState *tstate, _PyInterpreterFrame *frame, PyObject *retval)
{
    if (tstate->c_tracefunc) {
        if (call_trace_protected(tstate->c_tracefunc, tstate->c_traceobj,
                                    tstate, frame, PyTrace_RETURN, retval)) {
            return -1;
        }
    }
    if (tstate->c_profilefunc) {
        if (call_trace_protected(tstate->c_profilefunc, tstate->c_profileobj,
                                    tstate, frame, PyTrace_RETURN, retval)) {
            return -1;
        }
    }
    return 0;
}

static _PyInterpreterFrame *
pop_frame(PyThreadState *tstate, _PyInterpreterFrame *frame)
{
    _PyInterpreterFrame *prev_frame = frame->previous;
    _PyEvalFrameClearAndPop(tstate, frame);
    return prev_frame;
}


int _Py_CheckRecursiveCallPy(
    PyThreadState *tstate)
{
    if (tstate->recursion_headroom) {
        if (tstate->py_recursion_remaining < -50) {
            /* Overflowing while handling an overflow. Give up. */
            Py_FatalError("Cannot recover from Python stack overflow.");
        }
    }
    else {
        if (tstate->py_recursion_remaining <= 0) {
            tstate->recursion_headroom++;
            _PyErr_Format(tstate, PyExc_RecursionError,
                        "maximum recursion depth exceeded");
            tstate->recursion_headroom--;
            return -1;
        }
    }
    return 0;
}

static inline int _Py_EnterRecursivePy(PyThreadState *tstate) {
    return (tstate->py_recursion_remaining-- <= 0) &&
        _Py_CheckRecursiveCallPy(tstate);
}


static inline void _Py_LeaveRecursiveCallPy(PyThreadState *tstate)  {
    tstate->py_recursion_remaining++;
}


// GH-89279: Must be a macro to be sure it's inlined by MSVC.
#define is_method(stack_pointer, args) (PEEK((args)+2) != NULL)

#define KWNAMES_LEN() \
    (kwnames == NULL ? 0 : ((int)PyTuple_GET_SIZE(kwnames)))

PyObject* _Py_HOT_FUNCTION
_PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int throwflag)
{
    _Py_EnsureTstateNotNULL(tstate);
    CALL_STAT_INC(pyeval_calls);

#if USE_COMPUTED_GOTOS
/* Import the static jump table */
#include "opcode_targets.h"
#endif

#ifdef Py_STATS
    int lastopcode = 0;
#endif
    // opcode is an 8-bit value to improve the code generated by MSVC
    // for the big switch below (in combination with the EXTRA_CASES macro).
    uint8_t opcode;        /* Current opcode */
    int oparg;         /* Current opcode argument, if any */
    _Py_atomic_int * const eval_breaker = &tstate->interp->ceval.eval_breaker;
#ifdef LLTRACE
    int lltrace = 0;
#endif

    _PyCFrame cframe;
    _PyInterpreterFrame  entry_frame;
    PyObject *kwnames = NULL; // Borrowed reference. Reset by CALL instructions.

    /* WARNING: Because the _PyCFrame lives on the C stack,
     * but can be accessed from a heap allocated object (tstate)
     * strict stack discipline must be maintained.
     */
    _PyCFrame *prev_cframe = tstate->cframe;
    cframe.use_tracing = prev_cframe->use_tracing;
    cframe.previous = prev_cframe;
    tstate->cframe = &cframe;

    assert(tstate->interp->interpreter_trampoline != NULL);
#ifdef Py_DEBUG
    /* Set these to invalid but identifiable values for debugging. */
    entry_frame.f_funcobj = (PyObject*)0xaaa0;
    entry_frame.f_locals = (PyObject*)0xaaa1;
    entry_frame.frame_obj = (PyFrameObject*)0xaaa2;
    entry_frame.f_globals = (PyObject*)0xaaa3;
    entry_frame.f_builtins = (PyObject*)0xaaa4;
#endif
    entry_frame.f_code = tstate->interp->interpreter_trampoline;
    entry_frame.prev_instr =
        _PyCode_CODE(tstate->interp->interpreter_trampoline);
    entry_frame.stacktop = 0;
    entry_frame.owner = FRAME_OWNED_BY_CSTACK;
    entry_frame.yield_offset = 0;
    /* Push frame */
    entry_frame.previous = prev_cframe->current_frame;
    frame->previous = &entry_frame;
    cframe.current_frame = frame;

    if (_Py_EnterRecursiveCallTstate(tstate, "")) {
        tstate->c_recursion_remaining--;
        tstate->py_recursion_remaining--;
        goto exit_unwind;
    }

    /* support for generator.throw() */
    if (throwflag) {
        if (_Py_EnterRecursivePy(tstate)) {
            goto exit_unwind;
        }
        TRACE_FUNCTION_THROW_ENTRY();
        DTRACE_FUNCTION_ENTRY();
        goto resume_with_error;
    }

    /* Local "register" variables.
     * These are cached values from the frame and code object.  */

    PyObject *names;
    PyObject *consts;
    _Py_CODEUNIT *next_instr;
    PyObject **stack_pointer;

/* Sets the above local variables from the frame */
#define SET_LOCALS_FROM_FRAME() \
    { \
        PyCodeObject *co = frame->f_code; \
        names = co->co_names; \
        consts = co->co_consts; \
    } \
    assert(_PyInterpreterFrame_LASTI(frame) >= -1); \
    /* Jump back to the last instruction executed... */ \
    next_instr = frame->prev_instr + 1; \
    stack_pointer = _PyFrame_GetStackPointer(frame); \
    /* Set stackdepth to -1. \
        Update when returning or calling trace function. \
        Having stackdepth <= 0 ensures that invalid \
        values are not visible to the cycle GC. \
        We choose -1 rather than 0 to assist debugging. \
        */ \
    frame->stacktop = -1;


start_frame:
    if (_Py_EnterRecursivePy(tstate)) {
        goto exit_unwind;
    }

resume_frame:
    SET_LOCALS_FROM_FRAME();

#ifdef LLTRACE
    {
        if (frame != &entry_frame) {
            int r = PyDict_Contains(GLOBALS(), &_Py_ID(__lltrace__));
            if (r < 0) {
                goto exit_unwind;
            }
            lltrace = r;
        }
        if (lltrace) {
            lltrace_resume_frame(frame);
        }
    }
#endif

#ifdef Py_DEBUG
    /* _PyEval_EvalFrameDefault() must not be called with an exception set,
       because it can clear it (directly or indirectly) and so the
       caller loses its exception */
    assert(!_PyErr_Occurred(tstate));
#endif

    DISPATCH();

handle_eval_breaker:

    /* Do periodic things, like check for signals and async I/O.
     * We need to do this reasonably frequently, but not too frequently.
     * All loops should include a check of the eval breaker.
     * We also check on return from any builtin function.
     */
    if (_Py_HandlePending(tstate) != 0) {
        goto error;
    }
    DISPATCH();

    {
    /* Start instructions */
#if !USE_COMPUTED_GOTOS
    dispatch_opcode:
        switch (opcode)
#endif
        {

#include "generated_cases.c.h"

#if USE_COMPUTED_GOTOS
        TARGET_DO_TRACING:
#else
        case DO_TRACING:
#endif
    {
        assert(cframe.use_tracing);
        assert(tstate->tracing == 0);
        if (INSTR_OFFSET() >= frame->f_code->_co_firsttraceable) {
            int instr_prev = _PyInterpreterFrame_LASTI(frame);
            frame->prev_instr = next_instr;
            NEXTOPARG();
            // No _PyOpcode_Deopt here, since RESUME has no optimized forms:
            if (opcode == RESUME) {
                if (oparg < 2) {
                    CHECK_EVAL_BREAKER();
                }
                /* Call tracing */
                TRACE_FUNCTION_ENTRY();
                DTRACE_FUNCTION_ENTRY();
            }
            else {
                /* line-by-line tracing support */
                if (PyDTrace_LINE_ENABLED()) {
                    maybe_dtrace_line(frame, &tstate->trace_info, instr_prev);
                }

                if (cframe.use_tracing &&
                    tstate->c_tracefunc != NULL && !tstate->tracing) {
                    int err;
                    /* see maybe_call_line_trace()
                    for expository comments */
                    _PyFrame_SetStackPointer(frame, stack_pointer);

                    err = maybe_call_line_trace(tstate->c_tracefunc,
                                                tstate->c_traceobj,
                                                tstate, frame, instr_prev);
                    // Reload possibly changed frame fields:
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    frame->stacktop = -1;
                    // next_instr is only reloaded if tracing *does not* raise.
                    // This is consistent with the behavior of older Python
                    // versions. If a trace function sets a new f_lineno and
                    // *then* raises, we use the *old* location when searching
                    // for an exception handler, displaying the traceback, and
                    // so on:
                    if (err) {
                        // next_instr wasn't incremented at the start of this
                        // instruction. Increment it before handling the error,
                        // so that it looks the same as a "normal" instruction:
                        next_instr++;
                        goto error;
                    }
                    // Reload next_instr. Don't increment it, though, since
                    // we're going to re-dispatch to the "true" instruction now:
                    next_instr = frame->prev_instr;
                }
            }
        }
        NEXTOPARG();
        PRE_DISPATCH_GOTO();
        // No _PyOpcode_Deopt here, since EXTENDED_ARG has no optimized forms:
        while (opcode == EXTENDED_ARG) {
            // CPython hasn't ever traced the instruction after an EXTENDED_ARG.
            // Inline the EXTENDED_ARG here, so we can avoid branching there:
            INSTRUCTION_START(EXTENDED_ARG);
            opcode = _Py_OPCODE(*next_instr);
            oparg = oparg << 8 | _Py_OPARG(*next_instr);
            // Make sure the next instruction isn't a RESUME, since that needs
            // to trace properly (and shouldn't have an EXTENDED_ARG, anyways):
            assert(opcode != RESUME);
            PRE_DISPATCH_GOTO();
        }
        opcode = _PyOpcode_Deopt[opcode];
        if (_PyOpcode_Caches[opcode]) {
            _Py_CODEUNIT *counter = &next_instr[1];
            // The instruction is going to decrement the counter, so we need to
            // increment it here to make sure it doesn't try to specialize:
            if (!ADAPTIVE_COUNTER_IS_MAX(*counter)) {
                INCREMENT_ADAPTIVE_COUNTER(*counter);
            }
        }
        DISPATCH_GOTO();
    }

#if USE_COMPUTED_GOTOS
        _unknown_opcode:
#else
        EXTRA_CASES  // From opcode.h, a 'case' for each unused opcode
#endif
            /* Tell C compilers not to hold the opcode variable in the loop.
               next_instr points to the current instruction without TARGET(). */
            opcode = _Py_OPCODE(*next_instr);
            _PyErr_Format(tstate, PyExc_SystemError,
                          "%U:%d: unknown opcode %d",
                          frame->f_code->co_filename,
                          _PyInterpreterFrame_GetLine(frame),
                          opcode);
            goto error;

        } /* End instructions */

        /* This should never be reached. Every opcode should end with DISPATCH()
           or goto error. */
        Py_UNREACHABLE();

unbound_local_error:
        {
            format_exc_check_arg(tstate, PyExc_UnboundLocalError,
                UNBOUNDLOCAL_ERROR_MSG,
                PyTuple_GetItem(frame->f_code->co_localsplusnames, oparg)
            );
            goto error;
        }

pop_4_error:
    STACK_SHRINK(1);
pop_3_error:
    STACK_SHRINK(1);
pop_2_error:
    STACK_SHRINK(1);
pop_1_error:
    STACK_SHRINK(1);
error:
        kwnames = NULL;
        /* Double-check exception status. */
#ifdef NDEBUG
        if (!_PyErr_Occurred(tstate)) {
            _PyErr_SetString(tstate, PyExc_SystemError,
                             "error return without exception set");
        }
#else
        assert(_PyErr_Occurred(tstate));
#endif

        /* Log traceback info. */
        assert(frame != &entry_frame);
        if (!_PyFrame_IsIncomplete(frame)) {
            PyFrameObject *f = _PyFrame_GetFrameObject(frame);
            if (f != NULL) {
                PyTraceBack_Here(f);
            }
        }

        if (tstate->c_tracefunc != NULL) {
            /* Make sure state is set to FRAME_UNWINDING for tracing */
            call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj,
                           tstate, frame);
        }

exception_unwind:
        {
            /* We can't use frame->f_lasti here, as RERAISE may have set it */
            int offset = INSTR_OFFSET()-1;
            int level, handler, lasti;
            if (get_exception_handler(frame->f_code, offset, &level, &handler, &lasti) == 0) {
                // No handlers, so exit.
                assert(_PyErr_Occurred(tstate));

                /* Pop remaining stack entries. */
                PyObject **stackbase = _PyFrame_Stackbase(frame);
                while (stack_pointer > stackbase) {
                    PyObject *o = POP();
                    Py_XDECREF(o);
                }
                assert(STACK_LEVEL() == 0);
                _PyFrame_SetStackPointer(frame, stack_pointer);
                TRACE_FUNCTION_UNWIND();
                DTRACE_FUNCTION_EXIT();
                goto exit_unwind;
            }

            assert(STACK_LEVEL() >= level);
            PyObject **new_top = _PyFrame_Stackbase(frame) + level;
            while (stack_pointer > new_top) {
                PyObject *v = POP();
                Py_XDECREF(v);
            }
            PyObject *exc, *val, *tb;
            if (lasti) {
                int frame_lasti = _PyInterpreterFrame_LASTI(frame);
                PyObject *lasti = PyLong_FromLong(frame_lasti);
                if (lasti == NULL) {
                    goto exception_unwind;
                }
                PUSH(lasti);
            }
            _PyErr_Fetch(tstate, &exc, &val, &tb);
            /* Make the raw exception data
                available to the handler,
                so a program can emulate the
                Python main loop. */
            _PyErr_NormalizeException(tstate, &exc, &val, &tb);
            if (tb != NULL)
                PyException_SetTraceback(val, tb);
            else
                PyException_SetTraceback(val, Py_None);
            Py_XDECREF(tb);
            Py_XDECREF(exc);
            PUSH(val);
            JUMPTO(handler);
            /* Resume normal execution */
            DISPATCH();
        }
    }

exit_unwind:
    assert(_PyErr_Occurred(tstate));
    _Py_LeaveRecursiveCallPy(tstate);
    assert(frame != &entry_frame);
    frame = cframe.current_frame = pop_frame(tstate, frame);
    if (frame == &entry_frame) {
        /* Restore previous cframe and exit */
        tstate->cframe = cframe.previous;
        tstate->cframe->use_tracing = cframe.use_tracing;
        assert(tstate->cframe->current_frame == frame->previous);
        _Py_LeaveRecursiveCallTstate(tstate);
        return NULL;
    }

resume_with_error:
    SET_LOCALS_FROM_FRAME();
    goto error;

}

static void
format_missing(PyThreadState *tstate, const char *kind,
               PyCodeObject *co, PyObject *names, PyObject *qualname)
{
    int err;
    Py_ssize_t len = PyList_GET_SIZE(names);
    PyObject *name_str, *comma, *tail, *tmp;
 | |
|     assert(PyList_CheckExact(names));
 | |
|     assert(len >= 1);
 | |
|     /* Deal with the joys of natural language. */
 | |
|     switch (len) {
 | |
|     case 1:
 | |
|         name_str = PyList_GET_ITEM(names, 0);
 | |
|         Py_INCREF(name_str);
 | |
|         break;
 | |
|     case 2:
 | |
|         name_str = PyUnicode_FromFormat("%U and %U",
 | |
|                                         PyList_GET_ITEM(names, len - 2),
 | |
|                                         PyList_GET_ITEM(names, len - 1));
 | |
|         break;
 | |
|     default:
 | |
|         tail = PyUnicode_FromFormat(", %U, and %U",
 | |
|                                     PyList_GET_ITEM(names, len - 2),
 | |
|                                     PyList_GET_ITEM(names, len - 1));
 | |
|         if (tail == NULL)
 | |
|             return;
 | |
|         /* Chop off the last two objects in the list. This shouldn't actually
 | |
|            fail, but we can't be too careful. */
 | |
|         err = PyList_SetSlice(names, len - 2, len, NULL);
 | |
|         if (err == -1) {
 | |
|             Py_DECREF(tail);
 | |
|             return;
 | |
|         }
 | |
|         /* Stitch everything up into a nice comma-separated list. */
 | |
|         comma = PyUnicode_FromString(", ");
 | |
|         if (comma == NULL) {
 | |
|             Py_DECREF(tail);
 | |
|             return;
 | |
|         }
 | |
|         tmp = PyUnicode_Join(comma, names);
 | |
|         Py_DECREF(comma);
 | |
|         if (tmp == NULL) {
 | |
|             Py_DECREF(tail);
 | |
|             return;
 | |
|         }
 | |
|         name_str = PyUnicode_Concat(tmp, tail);
 | |
|         Py_DECREF(tmp);
 | |
|         Py_DECREF(tail);
 | |
|         break;
 | |
|     }
 | |
|     if (name_str == NULL)
 | |
|         return;
 | |
|     _PyErr_Format(tstate, PyExc_TypeError,
 | |
|                   "%U() missing %i required %s argument%s: %U",
 | |
|                   qualname,
 | |
|                   len,
 | |
|                   kind,
 | |
|                   len == 1 ? "" : "s",
 | |
|                   name_str);
 | |
|     Py_DECREF(name_str);
 | |
| }
 | |
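| /*
 | |
|  * Example (added for illustration): for def f(a, b, c): ... called as f(1),
 | |
|  * the message built above reads
 | |
|  *     "f() missing 2 required positional arguments: 'b' and 'c'"
 | |
|  * using the two-name "%U and %U" branch of the switch.
 | |
|  */
 | |
| 
 | |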
| 
 | |
| static void
 | |
| missing_arguments(PyThreadState *tstate, PyCodeObject *co,
 | |
|                   Py_ssize_t missing, Py_ssize_t defcount,
 | |
|                   PyObject **localsplus, PyObject *qualname)
 | |
| {
 | |
|     Py_ssize_t i, j = 0;
 | |
|     Py_ssize_t start, end;
 | |
|     int positional = (defcount != -1);
 | |
|     const char *kind = positional ? "positional" : "keyword-only";
 | |
|     PyObject *missing_names;
 | |
| 
 | |
|     /* Compute the names of the arguments that are missing. */
 | |
|     missing_names = PyList_New(missing);
 | |
|     if (missing_names == NULL)
 | |
|         return;
 | |
|     if (positional) {
 | |
|         start = 0;
 | |
|         end = co->co_argcount - defcount;
 | |
|     }
 | |
|     else {
 | |
|         start = co->co_argcount;
 | |
|         end = start + co->co_kwonlyargcount;
 | |
|     }
 | |
|     for (i = start; i < end; i++) {
 | |
|         if (localsplus[i] == NULL) {
 | |
|             PyObject *raw = PyTuple_GET_ITEM(co->co_localsplusnames, i);
 | |
|             PyObject *name = PyObject_Repr(raw);
 | |
|             if (name == NULL) {
 | |
|                 Py_DECREF(missing_names);
 | |
|                 return;
 | |
|             }
 | |
|             PyList_SET_ITEM(missing_names, j++, name);
 | |
|         }
 | |
|     }
 | |
|     assert(j == missing);
 | |
|     format_missing(tstate, kind, co, missing_names, qualname);
 | |
|     Py_DECREF(missing_names);
 | |
| }
 | |
| 
 | |
| static void
 | |
| too_many_positional(PyThreadState *tstate, PyCodeObject *co,
 | |
|                     Py_ssize_t given, PyObject *defaults,
 | |
|                     PyObject **localsplus, PyObject *qualname)
 | |
| {
 | |
|     int plural;
 | |
|     Py_ssize_t kwonly_given = 0;
 | |
|     Py_ssize_t i;
 | |
|     PyObject *sig, *kwonly_sig;
 | |
|     Py_ssize_t co_argcount = co->co_argcount;
 | |
| 
 | |
|     assert((co->co_flags & CO_VARARGS) == 0);
 | |
|     /* Count missing keyword-only args. */
 | |
|     for (i = co_argcount; i < co_argcount + co->co_kwonlyargcount; i++) {
 | |
|         if (localsplus[i] != NULL) {
 | |
|             kwonly_given++;
 | |
|         }
 | |
|     }
 | |
|     Py_ssize_t defcount = defaults == NULL ? 0 : PyTuple_GET_SIZE(defaults);
 | |
|     if (defcount) {
 | |
|         Py_ssize_t atleast = co_argcount - defcount;
 | |
|         plural = 1;
 | |
|         sig = PyUnicode_FromFormat("from %zd to %zd", atleast, co_argcount);
 | |
|     }
 | |
|     else {
 | |
|         plural = (co_argcount != 1);
 | |
|         sig = PyUnicode_FromFormat("%zd", co_argcount);
 | |
|     }
 | |
|     if (sig == NULL)
 | |
|         return;
 | |
|     if (kwonly_given) {
 | |
|         const char *format = " positional argument%s (and %zd keyword-only argument%s)";
 | |
|         kwonly_sig = PyUnicode_FromFormat(format,
 | |
|                                           given != 1 ? "s" : "",
 | |
|                                           kwonly_given,
 | |
|                                           kwonly_given != 1 ? "s" : "");
 | |
|         if (kwonly_sig == NULL) {
 | |
|             Py_DECREF(sig);
 | |
|             return;
 | |
|         }
 | |
|     }
 | |
|     else {
 | |
|         /* This will not fail. */
 | |
|         kwonly_sig = PyUnicode_FromString("");
 | |
|         assert(kwonly_sig != NULL);
 | |
|     }
 | |
|     _PyErr_Format(tstate, PyExc_TypeError,
 | |
|                   "%U() takes %U positional argument%s but %zd%U %s given",
 | |
|                   qualname,
 | |
|                   sig,
 | |
|                   plural ? "s" : "",
 | |
|                   given,
 | |
|                   kwonly_sig,
 | |
|                   given == 1 && !kwonly_given ? "was" : "were");
 | |
|     Py_DECREF(sig);
 | |
|     Py_DECREF(kwonly_sig);
 | |
| }
 | |
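| /*
 | |
|  * Example (added for illustration): for def f(a, b): ... called as f(1, 2, 3),
 | |
|  * the format above produces
 | |
|  *     "f() takes 2 positional arguments but 3 were given"
 | |
|  */
 | |
| 
 | |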
| 
 | |
| static int
 | |
| positional_only_passed_as_keyword(PyThreadState *tstate, PyCodeObject *co,
 | |
|                                   Py_ssize_t kwcount, PyObject* kwnames,
 | |
|                                   PyObject *qualname)
 | |
| {
 | |
|     int posonly_conflicts = 0;
 | |
|     PyObject* posonly_names = PyList_New(0);
 | |
|     if (posonly_names == NULL) {
 | |
|         /* Added guard: PyList_New() can fail; `fail` handles the NULL case. */
 | |
|         goto fail;
 | |
|     }
 | |
| 
 | |
|     for(int k=0; k < co->co_posonlyargcount; k++){
 | |
|         PyObject* posonly_name = PyTuple_GET_ITEM(co->co_localsplusnames, k);
 | |
| 
 | |
|         for (int k2=0; k2<kwcount; k2++){
 | |
|             /* Compare the pointers first and fall back to PyObject_RichCompareBool */
 | |
|             PyObject* kwname = PyTuple_GET_ITEM(kwnames, k2);
 | |
|             if (kwname == posonly_name){
 | |
|                 if(PyList_Append(posonly_names, kwname) != 0) {
 | |
|                     goto fail;
 | |
|                 }
 | |
|                 posonly_conflicts++;
 | |
|                 continue;
 | |
|             }
 | |
| 
 | |
|             int cmp = PyObject_RichCompareBool(posonly_name, kwname, Py_EQ);
 | |
| 
 | |
|             if ( cmp > 0) {
 | |
|                 if(PyList_Append(posonly_names, kwname) != 0) {
 | |
|                     goto fail;
 | |
|                 }
 | |
|                 posonly_conflicts++;
 | |
|             } else if (cmp < 0) {
 | |
|                 goto fail;
 | |
|             }
 | |
| 
 | |
|         }
 | |
|     }
 | |
|     if (posonly_conflicts) {
 | |
|         PyObject* comma = PyUnicode_FromString(", ");
 | |
|         if (comma == NULL) {
 | |
|             goto fail;
 | |
|         }
 | |
|         PyObject* error_names = PyUnicode_Join(comma, posonly_names);
 | |
|         Py_DECREF(comma);
 | |
|         if (error_names == NULL) {
 | |
|             goto fail;
 | |
|         }
 | |
|         _PyErr_Format(tstate, PyExc_TypeError,
 | |
|                       "%U() got some positional-only arguments passed"
 | |
|                       " as keyword arguments: '%U'",
 | |
|                       qualname, error_names);
 | |
|         Py_DECREF(error_names);
 | |
|         goto fail;
 | |
|     }
 | |
| 
 | |
|     Py_DECREF(posonly_names);
 | |
|     return 0;
 | |
| 
 | |
| fail:
 | |
|     Py_XDECREF(posonly_names);
 | |
|     return 1;
 | |
| 
 | |
| }
 | |
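| /*
 | |
|  * Example (added for illustration): for def f(a, /, b): ... called as
 | |
|  * f(a=1, b=2), the error raised above is
 | |
|  *     "f() got some positional-only arguments passed as keyword arguments: 'a'"
 | |
|  */
 | |
| 
 | |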
| 
 | |
| 
 | |
| static inline unsigned char *
 | |
| scan_back_to_entry_start(unsigned char *p) {
 | |
|     for (; (p[0]&128) == 0; p--);
 | |
|     return p;
 | |
| }
 | |
| 
 | |
| static inline unsigned char *
 | |
| skip_to_next_entry(unsigned char *p, unsigned char *end) {
 | |
|     while (p < end && ((p[0] & 128) == 0)) {
 | |
|         p++;
 | |
|     }
 | |
|     return p;
 | |
| }
 | |
| 
 | |
| 
 | |
| #define MAX_LINEAR_SEARCH 40
 | |
| 
 | |
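| /*
 | |
|  * Note (added, hedged): each entry in co_exceptiontable is a run of varints,
 | |
|  * consumed by the parse_varint() calls in get_exception_handler() below, in
 | |
|  * the order: start offset of the covered region, its size, the handler
 | |
|  * target, and a final value whose low bit is the "push lasti" flag and whose
 | |
|  * remaining bits are the stack depth (see the decoding of depth_and_lasti).
 | |
|  * The first byte of an entry has bit 7 set, which is what
 | |
|  * scan_back_to_entry_start() and skip_to_next_entry() rely on.
 | |
|  */
 | |
| 
 | |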
| static int
 | |
| get_exception_handler(PyCodeObject *code, int index, int *level, int *handler, int *lasti)
 | |
| {
 | |
|     unsigned char *start = (unsigned char *)PyBytes_AS_STRING(code->co_exceptiontable);
 | |
|     unsigned char *end = start + PyBytes_GET_SIZE(code->co_exceptiontable);
 | |
|     /* Invariants:
 | |
|      * start == end OR
 | |
|      * start points to a legal entry and end points
 | |
|      * beyond the table or to a legal entry that is after index.
 | |
|      */
 | |
|     if (end - start > MAX_LINEAR_SEARCH) {
 | |
|         int offset;
 | |
|         parse_varint(start, &offset);
 | |
|         if (offset > index) {
 | |
|             return 0;
 | |
|         }
 | |
|         do {
 | |
|             unsigned char * mid = start + ((end-start)>>1);
 | |
|             mid = scan_back_to_entry_start(mid);
 | |
|             parse_varint(mid, &offset);
 | |
|             if (offset > index) {
 | |
|                 end = mid;
 | |
|             }
 | |
|             else {
 | |
|                 start = mid;
 | |
|             }
 | |
| 
 | |
|         } while (end - start > MAX_LINEAR_SEARCH);
 | |
|     }
 | |
|     unsigned char *scan = start;
 | |
|     while (scan < end) {
 | |
|         int start_offset, size;
 | |
|         scan = parse_varint(scan, &start_offset);
 | |
|         if (start_offset > index) {
 | |
|             break;
 | |
|         }
 | |
|         scan = parse_varint(scan, &size);
 | |
|         if (start_offset + size > index) {
 | |
|             scan = parse_varint(scan, handler);
 | |
|             int depth_and_lasti;
 | |
|             parse_varint(scan, &depth_and_lasti);
 | |
|             *level = depth_and_lasti >> 1;
 | |
|             *lasti = depth_and_lasti & 1;
 | |
|             return 1;
 | |
|         }
 | |
|         scan = skip_to_next_entry(scan, end);
 | |
|     }
 | |
|     return 0;
 | |
| }
 | |
| 
 | |
| static int
 | |
| initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
 | |
|     PyObject **localsplus, PyObject *const *args,
 | |
|     Py_ssize_t argcount, PyObject *kwnames)
 | |
| {
 | |
|     PyCodeObject *co = (PyCodeObject*)func->func_code;
 | |
|     const Py_ssize_t total_args = co->co_argcount + co->co_kwonlyargcount;
 | |
| 
 | |
|     /* Create a dictionary for keyword parameters (**kwargs) */
 | |
|     PyObject *kwdict;
 | |
|     Py_ssize_t i;
 | |
|     if (co->co_flags & CO_VARKEYWORDS) {
 | |
|         kwdict = PyDict_New();
 | |
|         if (kwdict == NULL) {
 | |
|             goto fail_pre_positional;
 | |
|         }
 | |
|         i = total_args;
 | |
|         if (co->co_flags & CO_VARARGS) {
 | |
|             i++;
 | |
|         }
 | |
|         assert(localsplus[i] == NULL);
 | |
|         localsplus[i] = kwdict;
 | |
|     }
 | |
|     else {
 | |
|         kwdict = NULL;
 | |
|     }
 | |
| 
 | |
|     /* Copy all positional arguments into local variables */
 | |
|     Py_ssize_t j, n;
 | |
|     if (argcount > co->co_argcount) {
 | |
|         n = co->co_argcount;
 | |
|     }
 | |
|     else {
 | |
|         n = argcount;
 | |
|     }
 | |
|     for (j = 0; j < n; j++) {
 | |
|         PyObject *x = args[j];
 | |
|         assert(localsplus[j] == NULL);
 | |
|         localsplus[j] = x;
 | |
|     }
 | |
| 
 | |
|     /* Pack other positional arguments into the *args argument */
 | |
|     if (co->co_flags & CO_VARARGS) {
 | |
|         PyObject *u = NULL;
 | |
|         if (argcount == n) {
 | |
|             u = Py_NewRef(&_Py_SINGLETON(tuple_empty));
 | |
|         }
 | |
|         else {
 | |
|             assert(args != NULL);
 | |
|             u = _PyTuple_FromArraySteal(args + n, argcount - n);
 | |
|         }
 | |
|         if (u == NULL) {
 | |
|             goto fail_post_positional;
 | |
|         }
 | |
|         assert(localsplus[total_args] == NULL);
 | |
|         localsplus[total_args] = u;
 | |
|     }
 | |
|     else if (argcount > n) {
 | |
|         /* Too many positional args. Error is reported later */
 | |
|         for (j = n; j < argcount; j++) {
 | |
|             Py_DECREF(args[j]);
 | |
|         }
 | |
|     }
 | |
| 
 | |
|     /* Handle keyword arguments */
 | |
|     if (kwnames != NULL) {
 | |
|         Py_ssize_t kwcount = PyTuple_GET_SIZE(kwnames);
 | |
|         for (i = 0; i < kwcount; i++) {
 | |
|             PyObject **co_varnames;
 | |
|             PyObject *keyword = PyTuple_GET_ITEM(kwnames, i);
 | |
|             PyObject *value = args[i+argcount];
 | |
|             Py_ssize_t j;
 | |
| 
 | |
|             if (keyword == NULL || !PyUnicode_Check(keyword)) {
 | |
|                 _PyErr_Format(tstate, PyExc_TypeError,
 | |
|                             "%U() keywords must be strings",
 | |
|                           func->func_qualname);
 | |
|                 goto kw_fail;
 | |
|             }
 | |
| 
 | |
|             /* Speed hack: do raw pointer compares. As names are
 | |
|             normally interned this should almost always hit. */
 | |
|             co_varnames = ((PyTupleObject *)(co->co_localsplusnames))->ob_item;
 | |
|             for (j = co->co_posonlyargcount; j < total_args; j++) {
 | |
|                 PyObject *varname = co_varnames[j];
 | |
|                 if (varname == keyword) {
 | |
|                     goto kw_found;
 | |
|                 }
 | |
|             }
 | |
| 
 | |
|             /* Slow fallback, just in case */
 | |
|             for (j = co->co_posonlyargcount; j < total_args; j++) {
 | |
|                 PyObject *varname = co_varnames[j];
 | |
|                 int cmp = PyObject_RichCompareBool(keyword, varname, Py_EQ);
 | |
|                 if (cmp > 0) {
 | |
|                     goto kw_found;
 | |
|                 }
 | |
|                 else if (cmp < 0) {
 | |
|                     goto kw_fail;
 | |
|                 }
 | |
|             }
 | |
| 
 | |
|             assert(j >= total_args);
 | |
|             if (kwdict == NULL) {
 | |
| 
 | |
|                 if (co->co_posonlyargcount
 | |
|                     && positional_only_passed_as_keyword(tstate, co,
 | |
|                                                         kwcount, kwnames,
 | |
|                                                         func->func_qualname))
 | |
|                 {
 | |
|                     goto kw_fail;
 | |
|                 }
 | |
| 
 | |
|                 _PyErr_Format(tstate, PyExc_TypeError,
 | |
|                             "%U() got an unexpected keyword argument '%S'",
 | |
|                           func->func_qualname, keyword);
 | |
|                 goto kw_fail;
 | |
|             }
 | |
| 
 | |
|             if (PyDict_SetItem(kwdict, keyword, value) == -1) {
 | |
|                 goto kw_fail;
 | |
|             }
 | |
|             Py_DECREF(value);
 | |
|             continue;
 | |
| 
 | |
|         kw_fail:
 | |
|             for (;i < kwcount; i++) {
 | |
|                 PyObject *value = args[i+argcount];
 | |
|                 Py_DECREF(value);
 | |
|             }
 | |
|             goto fail_post_args;
 | |
| 
 | |
|         kw_found:
 | |
|             if (localsplus[j] != NULL) {
 | |
|                 _PyErr_Format(tstate, PyExc_TypeError,
 | |
|                             "%U() got multiple values for argument '%S'",
 | |
|                           func->func_qualname, keyword);
 | |
|                 goto kw_fail;
 | |
|             }
 | |
|             localsplus[j] = value;
 | |
|         }
 | |
|     }
 | |
| 
 | |
|     /* Check the number of positional arguments */
 | |
|     if ((argcount > co->co_argcount) && !(co->co_flags & CO_VARARGS)) {
 | |
|         too_many_positional(tstate, co, argcount, func->func_defaults, localsplus,
 | |
|                             func->func_qualname);
 | |
|         goto fail_post_args;
 | |
|     }
 | |
| 
 | |
|     /* Add missing positional arguments (copy default values from defs) */
 | |
|     if (argcount < co->co_argcount) {
 | |
|         Py_ssize_t defcount = func->func_defaults == NULL ? 0 : PyTuple_GET_SIZE(func->func_defaults);
 | |
|         Py_ssize_t m = co->co_argcount - defcount;
 | |
|         Py_ssize_t missing = 0;
 | |
|         for (i = argcount; i < m; i++) {
 | |
|             if (localsplus[i] == NULL) {
 | |
|                 missing++;
 | |
|             }
 | |
|         }
 | |
|         if (missing) {
 | |
|             missing_arguments(tstate, co, missing, defcount, localsplus,
 | |
|                               func->func_qualname);
 | |
|             goto fail_post_args;
 | |
|         }
 | |
|         if (n > m)
 | |
|             i = n - m;
 | |
|         else
 | |
|             i = 0;
 | |
|         if (defcount) {
 | |
|             PyObject **defs = &PyTuple_GET_ITEM(func->func_defaults, 0);
 | |
|             for (; i < defcount; i++) {
 | |
|                 if (localsplus[m+i] == NULL) {
 | |
|                     PyObject *def = defs[i];
 | |
|                     localsplus[m+i] = Py_NewRef(def);
 | |
|                 }
 | |
|             }
 | |
|         }
 | |
|     }
 | |
| 
 | |
|     /* Add missing keyword arguments (copy default values from kwdefs) */
 | |
|     if (co->co_kwonlyargcount > 0) {
 | |
|         Py_ssize_t missing = 0;
 | |
|         for (i = co->co_argcount; i < total_args; i++) {
 | |
|             if (localsplus[i] != NULL)
 | |
|                 continue;
 | |
|             PyObject *varname = PyTuple_GET_ITEM(co->co_localsplusnames, i);
 | |
|             if (func->func_kwdefaults != NULL) {
 | |
|                 PyObject *def = PyDict_GetItemWithError(func->func_kwdefaults, varname);
 | |
|                 if (def) {
 | |
|                     localsplus[i] = Py_NewRef(def);
 | |
|                     continue;
 | |
|                 }
 | |
|                 else if (_PyErr_Occurred(tstate)) {
 | |
|                     goto fail_post_args;
 | |
|                 }
 | |
|             }
 | |
|             missing++;
 | |
|         }
 | |
|         if (missing) {
 | |
|             missing_arguments(tstate, co, missing, -1, localsplus,
 | |
|                               func->func_qualname);
 | |
|             goto fail_post_args;
 | |
|         }
 | |
|     }
 | |
|     return 0;
 | |
| 
 | |
| fail_pre_positional:
 | |
|     for (j = 0; j < argcount; j++) {
 | |
|         Py_DECREF(args[j]);
 | |
|     }
 | |
|     /* fall through */
 | |
| fail_post_positional:
 | |
|     if (kwnames) {
 | |
|         Py_ssize_t kwcount = PyTuple_GET_SIZE(kwnames);
 | |
|         for (j = argcount; j < argcount+kwcount; j++) {
 | |
|             Py_DECREF(args[j]);
 | |
|         }
 | |
|     }
 | |
|     /* fall through */
 | |
| fail_post_args:
 | |
|     return -1;
 | |
| }
 | |
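| /*
 | |
|  * Sketch of the localsplus layout filled in above (added note, hedged):
 | |
|  * slots [0, co_argcount) hold positional parameters, followed by the
 | |
|  * keyword-only parameters up to total_args; if CO_VARARGS is set, the
 | |
|  * packed *args tuple goes in slot total_args, and if CO_VARKEYWORDS is
 | |
|  * set, the **kwargs dict goes in the slot after that (or in total_args
 | |
|  * itself when there is no *args), as computed at the top of the function.
 | |
|  */
 | |
| 
 | |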
| 
 | |
| /* Consumes references to func, locals and all the args */
 | |
| static _PyInterpreterFrame *
 | |
| _PyEvalFramePushAndInit(PyThreadState *tstate, PyFunctionObject *func,
 | |
|                         PyObject *locals, PyObject* const* args,
 | |
|                         size_t argcount, PyObject *kwnames)
 | |
| {
 | |
|     PyCodeObject * code = (PyCodeObject *)func->func_code;
 | |
|     CALL_STAT_INC(frames_pushed);
 | |
|     _PyInterpreterFrame *frame = _PyThreadState_PushFrame(tstate, code->co_framesize);
 | |
|     if (frame == NULL) {
 | |
|         goto fail;
 | |
|     }
 | |
|     _PyFrame_InitializeSpecials(frame, func, locals, code);
 | |
|     PyObject **localsarray = &frame->localsplus[0];
 | |
|     for (int i = 0; i < code->co_nlocalsplus; i++) {
 | |
|         localsarray[i] = NULL;
 | |
|     }
 | |
|     if (initialize_locals(tstate, func, localsarray, args, argcount, kwnames)) {
 | |
|         assert(frame->owner != FRAME_OWNED_BY_GENERATOR);
 | |
|         _PyEvalFrameClearAndPop(tstate, frame);
 | |
|         return NULL;
 | |
|     }
 | |
|     return frame;
 | |
| fail:
 | |
|     /* Consume the references */
 | |
|     for (size_t i = 0; i < argcount; i++) {
 | |
|         Py_DECREF(args[i]);
 | |
|     }
 | |
|     if (kwnames) {
 | |
|         Py_ssize_t kwcount = PyTuple_GET_SIZE(kwnames);
 | |
|         for (Py_ssize_t i = 0; i < kwcount; i++) {
 | |
|             Py_DECREF(args[i+argcount]);
 | |
|         }
 | |
|     }
 | |
|     PyErr_NoMemory();
 | |
|     return NULL;
 | |
| }
 | |
| 
 | |
| static void
 | |
| clear_thread_frame(PyThreadState *tstate, _PyInterpreterFrame * frame)
 | |
| {
 | |
|     assert(frame->owner == FRAME_OWNED_BY_THREAD);
 | |
|     // Make sure that this is, indeed, the top frame. We can't check this in
 | |
|     // _PyThreadState_PopFrame, since f_code is already cleared at that point:
 | |
|     assert((PyObject **)frame + frame->f_code->co_framesize ==
 | |
|         tstate->datastack_top);
 | |
|     tstate->c_recursion_remaining--;
 | |
|     assert(frame->frame_obj == NULL || frame->frame_obj->f_frame == frame);
 | |
|     _PyFrame_Clear(frame);
 | |
|     tstate->c_recursion_remaining++;
 | |
|     _PyThreadState_PopFrame(tstate, frame);
 | |
| }
 | |
| 
 | |
| static void
 | |
| clear_gen_frame(PyThreadState *tstate, _PyInterpreterFrame * frame)
 | |
| {
 | |
|     assert(frame->owner == FRAME_OWNED_BY_GENERATOR);
 | |
|     PyGenObject *gen = _PyFrame_GetGenerator(frame);
 | |
|     gen->gi_frame_state = FRAME_CLEARED;
 | |
|     assert(tstate->exc_info == &gen->gi_exc_state);
 | |
|     tstate->exc_info = gen->gi_exc_state.previous_item;
 | |
|     gen->gi_exc_state.previous_item = NULL;
 | |
|     tstate->c_recursion_remaining--;
 | |
|     assert(frame->frame_obj == NULL || frame->frame_obj->f_frame == frame);
 | |
|     _PyFrame_Clear(frame);
 | |
|     tstate->c_recursion_remaining++;
 | |
|     frame->previous = NULL;
 | |
| }
 | |
| 
 | |
| static void
 | |
| _PyEvalFrameClearAndPop(PyThreadState *tstate, _PyInterpreterFrame * frame)
 | |
| {
 | |
|     if (frame->owner == FRAME_OWNED_BY_THREAD) {
 | |
|         clear_thread_frame(tstate, frame);
 | |
|     }
 | |
|     else {
 | |
|         clear_gen_frame(tstate, frame);
 | |
|     }
 | |
| }
 | |
| 
 | |
| 
 | |
| PyObject *
 | |
| _PyEval_Vector(PyThreadState *tstate, PyFunctionObject *func,
 | |
|                PyObject *locals,
 | |
|                PyObject* const* args, size_t argcount,
 | |
|                PyObject *kwnames)
 | |
| {
 | |
|     /* _PyEvalFramePushAndInit consumes the references
 | |
|      * to func, locals and all the arguments */
 | |
|     Py_INCREF(func);
 | |
|     Py_XINCREF(locals);
 | |
|     for (size_t i = 0; i < argcount; i++) {
 | |
|         Py_INCREF(args[i]);
 | |
|     }
 | |
|     if (kwnames) {
 | |
|         Py_ssize_t kwcount = PyTuple_GET_SIZE(kwnames);
 | |
|         for (Py_ssize_t i = 0; i < kwcount; i++) {
 | |
|             Py_INCREF(args[i+argcount]);
 | |
|         }
 | |
|     }
 | |
|     _PyInterpreterFrame *frame = _PyEvalFramePushAndInit(
 | |
|         tstate, func, locals, args, argcount, kwnames);
 | |
|     if (frame == NULL) {
 | |
|         return NULL;
 | |
|     }
 | |
|     EVAL_CALL_STAT_INC(EVAL_CALL_VECTOR);
 | |
|     return _PyEval_EvalFrame(tstate, frame, 0);
 | |
| }
 | |
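| /*
 | |
|  * Calling convention note (added, hedged): args holds the positional values
 | |
|  * followed by the keyword values, and kwnames (if not NULL) holds just the
 | |
|  * keyword names.  For a Python-level call f(1, 2, z=3) this corresponds to
 | |
|  * args = {1, 2, 3}, argcount = 2, kwnames = ("z",), matching how
 | |
|  * initialize_locals() reads args[i + argcount] for each keyword.
 | |
|  */
 | |
| 
 | |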
| 
 | |
| /* Legacy API */
 | |
| PyObject *
 | |
| PyEval_EvalCodeEx(PyObject *_co, PyObject *globals, PyObject *locals,
 | |
|                   PyObject *const *args, int argcount,
 | |
|                   PyObject *const *kws, int kwcount,
 | |
|                   PyObject *const *defs, int defcount,
 | |
|                   PyObject *kwdefs, PyObject *closure)
 | |
| {
 | |
|     PyThreadState *tstate = _PyThreadState_GET();
 | |
|     PyObject *res = NULL;
 | |
|     PyObject *defaults = _PyTuple_FromArray(defs, defcount);
 | |
|     if (defaults == NULL) {
 | |
|         return NULL;
 | |
|     }
 | |
|     PyObject *builtins = _PyEval_BuiltinsFromGlobals(tstate, globals); // borrowed ref
 | |
|     if (builtins == NULL) {
 | |
|         Py_DECREF(defaults);
 | |
|         return NULL;
 | |
|     }
 | |
|     if (locals == NULL) {
 | |
|         locals = globals;
 | |
|     }
 | |
|     PyObject *kwnames = NULL;
 | |
|     PyObject *const *allargs;
 | |
|     PyObject **newargs = NULL;
 | |
|     PyFunctionObject *func = NULL;
 | |
|     if (kwcount == 0) {
 | |
|         allargs = args;
 | |
|     }
 | |
|     else {
 | |
|         kwnames = PyTuple_New(kwcount);
 | |
|         if (kwnames == NULL) {
 | |
|             goto fail;
 | |
|         }
 | |
|         newargs = PyMem_Malloc(sizeof(PyObject *)*(kwcount+argcount));
 | |
|         if (newargs == NULL) {
 | |
|             goto fail;
 | |
|         }
 | |
|         for (int i = 0; i < argcount; i++) {
 | |
|             newargs[i] = args[i];
 | |
|         }
 | |
|         for (int i = 0; i < kwcount; i++) {
 | |
|             PyTuple_SET_ITEM(kwnames, i, Py_NewRef(kws[2*i]));
 | |
|             newargs[argcount+i] = kws[2*i+1];
 | |
|         }
 | |
|         allargs = newargs;
 | |
|     }
 | |
|     PyFrameConstructor constr = {
 | |
|         .fc_globals = globals,
 | |
|         .fc_builtins = builtins,
 | |
|         .fc_name = ((PyCodeObject *)_co)->co_name,
 | |
|         .fc_qualname = ((PyCodeObject *)_co)->co_name,
 | |
|         .fc_code = _co,
 | |
|         .fc_defaults = defaults,
 | |
|         .fc_kwdefaults = kwdefs,
 | |
|         .fc_closure = closure
 | |
|     };
 | |
|     func = _PyFunction_FromConstructor(&constr);
 | |
|     if (func == NULL) {
 | |
|         goto fail;
 | |
|     }
 | |
|     EVAL_CALL_STAT_INC(EVAL_CALL_LEGACY);
 | |
|     res = _PyEval_Vector(tstate, func, locals,
 | |
|                          allargs, argcount,
 | |
|                          kwnames);
 | |
| fail:
 | |
|     Py_XDECREF(func);
 | |
|     Py_XDECREF(kwnames);
 | |
|     PyMem_Free(newargs);
 | |
|     Py_DECREF(defaults);
 | |
|     return res;
 | |
| }
 | |
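| /*
 | |
|  * Minimal usage sketch for the legacy API (added for illustration; `code`
 | |
|  * and `globals` are assumed to be a valid code object and a dict):
 | |
|  *
 | |
|  *     PyObject *res = PyEval_EvalCodeEx((PyObject *)code, globals, NULL,
 | |
|  *                                       NULL, 0,     // no positional args
 | |
|  *                                       NULL, 0,     // no keyword args
 | |
|  *                                       NULL, 0,     // no defaults
 | |
|  *                                       NULL, NULL); // no kwdefaults/closure
 | |
|  */
 | |
| 
 | |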
| 
 | |
| 
 | |
| /* Logic for the raise statement (too complicated for inlining).
 | |
|    This *consumes* a reference count to each of its arguments. */
 | |
| static int
 | |
| do_raise(PyThreadState *tstate, PyObject *exc, PyObject *cause)
 | |
| {
 | |
|     PyObject *type = NULL, *value = NULL;
 | |
| 
 | |
|     if (exc == NULL) {
 | |
|         /* Reraise */
 | |
|         _PyErr_StackItem *exc_info = _PyErr_GetTopmostException(tstate);
 | |
|         value = exc_info->exc_value;
 | |
|         if (Py_IsNone(value) || value == NULL) {
 | |
|             _PyErr_SetString(tstate, PyExc_RuntimeError,
 | |
|                              "No active exception to reraise");
 | |
|             return 0;
 | |
|         }
 | |
|         assert(PyExceptionInstance_Check(value));
 | |
|         type = PyExceptionInstance_Class(value);
 | |
|         Py_XINCREF(type);
 | |
|         Py_XINCREF(value);
 | |
|         PyObject *tb = PyException_GetTraceback(value); /* new ref */
 | |
|         _PyErr_Restore(tstate, type, value, tb);
 | |
|         return 1;
 | |
|     }
 | |
| 
 | |
|     /* We support the following forms of raise:
 | |
|        raise
 | |
|        raise <instance>
 | |
|        raise <type> */
 | |
| 
 | |
|     if (PyExceptionClass_Check(exc)) {
 | |
|         type = exc;
 | |
|         value = _PyObject_CallNoArgs(exc);
 | |
|         if (value == NULL)
 | |
|             goto raise_error;
 | |
|         if (!PyExceptionInstance_Check(value)) {
 | |
|             _PyErr_Format(tstate, PyExc_TypeError,
 | |
|                           "calling %R should have returned an instance of "
 | |
|                           "BaseException, not %R",
 | |
|                           type, Py_TYPE(value));
 | |
|              goto raise_error;
 | |
|         }
 | |
|     }
 | |
|     else if (PyExceptionInstance_Check(exc)) {
 | |
|         value = exc;
 | |
|         type = PyExceptionInstance_Class(exc);
 | |
|         Py_INCREF(type);
 | |
|     }
 | |
|     else {
 | |
|         /* Not something you can raise.  You get an exception
 | |
|            anyway, just not what you specified :-) */
 | |
|         Py_DECREF(exc);
 | |
|         _PyErr_SetString(tstate, PyExc_TypeError,
 | |
|                          "exceptions must derive from BaseException");
 | |
|         goto raise_error;
 | |
|     }
 | |
| 
 | |
|     assert(type != NULL);
 | |
|     assert(value != NULL);
 | |
| 
 | |
|     if (cause) {
 | |
|         PyObject *fixed_cause;
 | |
|         if (PyExceptionClass_Check(cause)) {
 | |
|             fixed_cause = _PyObject_CallNoArgs(cause);
 | |
|             if (fixed_cause == NULL)
 | |
|                 goto raise_error;
 | |
|             Py_DECREF(cause);
 | |
|         }
 | |
|         else if (PyExceptionInstance_Check(cause)) {
 | |
|             fixed_cause = cause;
 | |
|         }
 | |
|         else if (Py_IsNone(cause)) {
 | |
|             Py_DECREF(cause);
 | |
|             fixed_cause = NULL;
 | |
|         }
 | |
|         else {
 | |
|             _PyErr_SetString(tstate, PyExc_TypeError,
 | |
|                              "exception causes must derive from "
 | |
|                              "BaseException");
 | |
|             goto raise_error;
 | |
|         }
 | |
|         PyException_SetCause(value, fixed_cause);
 | |
|     }
 | |
| 
 | |
|     _PyErr_SetObject(tstate, type, value);
 | |
|     /* _PyErr_SetObject incref's its arguments */
 | |
|     Py_DECREF(value);
 | |
|     Py_DECREF(type);
 | |
|     return 0;
 | |
| 
 | |
| raise_error:
 | |
|     Py_XDECREF(value);
 | |
|     Py_XDECREF(type);
 | |
|     Py_XDECREF(cause);
 | |
|     return 0;
 | |
| }
 | |
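| /*
 | |
|  * Example (added for illustration): `raise ValueError` takes the
 | |
|  * PyExceptionClass_Check branch above and calls the class to get an
 | |
|  * instance, while `raise err from cause_exc` takes the instance branch and
 | |
|  * then attaches the cause via PyException_SetCause.
 | |
|  */
 | |
| 
 | |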
| 
 | |
| /* Logic for matching an exception in an except* clause (too
 | |
|    complicated for inlining).
 | |
| */
 | |
| 
 | |
| static int
 | |
| exception_group_match(PyObject* exc_value, PyObject *match_type,
 | |
|                       PyObject **match, PyObject **rest)
 | |
| {
 | |
|     if (Py_IsNone(exc_value)) {
 | |
|         *match = Py_NewRef(Py_None);
 | |
|         *rest = Py_NewRef(Py_None);
 | |
|         return 0;
 | |
|     }
 | |
|     assert(PyExceptionInstance_Check(exc_value));
 | |
| 
 | |
|     if (PyErr_GivenExceptionMatches(exc_value, match_type)) {
 | |
|         /* Full match of exc itself */
 | |
|         bool is_eg = _PyBaseExceptionGroup_Check(exc_value);
 | |
|         if (is_eg) {
 | |
|             *match = Py_NewRef(exc_value);
 | |
|         }
 | |
|         else {
 | |
|             /* naked exception - wrap it */
 | |
|             PyObject *excs = PyTuple_Pack(1, exc_value);
 | |
|             if (excs == NULL) {
 | |
|                 return -1;
 | |
|             }
 | |
|             PyObject *wrapped = _PyExc_CreateExceptionGroup("", excs);
 | |
|             Py_DECREF(excs);
 | |
|             if (wrapped == NULL) {
 | |
|                 return -1;
 | |
|             }
 | |
|             *match = wrapped;
 | |
|         }
 | |
|         *rest = Py_NewRef(Py_None);
 | |
|         return 0;
 | |
|     }
 | |
| 
 | |
|     /* exc_value does not match match_type.
 | |
|      * Check for partial match if it's an exception group.
 | |
|      */
 | |
|     if (_PyBaseExceptionGroup_Check(exc_value)) {
 | |
|         PyObject *pair = PyObject_CallMethod(exc_value, "split", "(O)",
 | |
|                                              match_type);
 | |
|         if (pair == NULL) {
 | |
|             return -1;
 | |
|         }
 | |
|         assert(PyTuple_CheckExact(pair));
 | |
|         assert(PyTuple_GET_SIZE(pair) == 2);
 | |
|         *match = Py_NewRef(PyTuple_GET_ITEM(pair, 0));
 | |
|         *rest = Py_NewRef(PyTuple_GET_ITEM(pair, 1));
 | |
|         Py_DECREF(pair);
 | |
|         return 0;
 | |
|     }
 | |
|     /* no match */
 | |
|     *match = Py_NewRef(Py_None);
 | |
|     *rest = Py_NewRef(Py_None);
 | |
|     return 0;
 | |
| }
 | |
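| /*
 | |
|  * Example (added for illustration): matching ExceptionGroup("eg",
 | |
|  * [ValueError(1), TypeError(2)]) against ValueError in an except* clause
 | |
|  * goes through the split() call above, producing a match group containing
 | |
|  * the ValueError and a rest group containing the TypeError.
 | |
|  */
 | |
| 
 | |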
| 
 | |
| /* Iterate v argcnt times and store the results on the stack (via decreasing
 | |
|    sp).  Return 1 for success, 0 if error.
 | |
| 
 | |
|    If argcntafter == -1, do a simple unpack. If it is >= 0, do an unpack
 | |
|    with a variable target.
 | |
| */
 | |
| 
 | |
| static int
 | |
| unpack_iterable(PyThreadState *tstate, PyObject *v,
 | |
|                 int argcnt, int argcntafter, PyObject **sp)
 | |
| {
 | |
|     int i = 0, j = 0;
 | |
|     Py_ssize_t ll = 0;
 | |
|     PyObject *it;  /* iter(v) */
 | |
|     PyObject *w;
 | |
|     PyObject *l = NULL; /* variable list */
 | |
| 
 | |
|     assert(v != NULL);
 | |
| 
 | |
|     it = PyObject_GetIter(v);
 | |
|     if (it == NULL) {
 | |
|         if (_PyErr_ExceptionMatches(tstate, PyExc_TypeError) &&
 | |
|             Py_TYPE(v)->tp_iter == NULL && !PySequence_Check(v))
 | |
|         {
 | |
|             _PyErr_Format(tstate, PyExc_TypeError,
 | |
|                           "cannot unpack non-iterable %.200s object",
 | |
|                           Py_TYPE(v)->tp_name);
 | |
|         }
 | |
|         return 0;
 | |
|     }
 | |
| 
 | |
|     for (; i < argcnt; i++) {
 | |
|         w = PyIter_Next(it);
 | |
|         if (w == NULL) {
 | |
|             /* Iterator done, via error or exhaustion. */
 | |
|             if (!_PyErr_Occurred(tstate)) {
 | |
|                 if (argcntafter == -1) {
 | |
|                     _PyErr_Format(tstate, PyExc_ValueError,
 | |
|                                   "not enough values to unpack "
 | |
|                                   "(expected %d, got %d)",
 | |
|                                   argcnt, i);
 | |
|                 }
 | |
|                 else {
 | |
|                     _PyErr_Format(tstate, PyExc_ValueError,
 | |
|                                   "not enough values to unpack "
 | |
|                                   "(expected at least %d, got %d)",
 | |
|                                   argcnt + argcntafter, i);
 | |
|                 }
 | |
|             }
 | |
|             goto Error;
 | |
|         }
 | |
|         *--sp = w;
 | |
|     }
 | |
| 
 | |
|     if (argcntafter == -1) {
 | |
|         /* We better have exhausted the iterator now. */
 | |
|         w = PyIter_Next(it);
 | |
|         if (w == NULL) {
 | |
|             if (_PyErr_Occurred(tstate))
 | |
|                 goto Error;
 | |
|             Py_DECREF(it);
 | |
|             return 1;
 | |
|         }
 | |
|         Py_DECREF(w);
 | |
|         _PyErr_Format(tstate, PyExc_ValueError,
 | |
|                       "too many values to unpack (expected %d)",
 | |
|                       argcnt);
 | |
|         goto Error;
 | |
|     }
 | |
| 
 | |
|     l = PySequence_List(it);
 | |
|     if (l == NULL)
 | |
|         goto Error;
 | |
|     *--sp = l;
 | |
|     i++;
 | |
| 
 | |
|     ll = PyList_GET_SIZE(l);
 | |
|     if (ll < argcntafter) {
 | |
|         _PyErr_Format(tstate, PyExc_ValueError,
 | |
|             "not enough values to unpack (expected at least %d, got %zd)",
 | |
|             argcnt + argcntafter, argcnt + ll);
 | |
|         goto Error;
 | |
|     }
 | |
| 
 | |
|     /* Pop the "after-variable" args off the list. */
 | |
|     for (j = argcntafter; j > 0; j--, i++) {
 | |
|         *--sp = PyList_GET_ITEM(l, ll - j);
 | |
|     }
 | |
|     /* Resize the list. */
 | |
|     Py_SET_SIZE(l, ll - argcntafter);
 | |
|     Py_DECREF(it);
 | |
|     return 1;
 | |
| 
 | |
| Error:
 | |
|     for (; i > 0; i--, sp++)
 | |
|         Py_DECREF(*sp);
 | |
|     Py_XDECREF(it);
 | |
|     return 0;
 | |
| }
 | |
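| /*
 | |
|  * Example (added for illustration): `a, b, c = [1, 2, 3]` calls this with
 | |
|  * argcnt=3 and argcntafter=-1, while `a, *rest, b = range(5)` uses argcnt=1
 | |
|  * and argcntafter=1, leaving rest == [1, 2, 3] after the list is resized.
 | |
|  */
 | |
| 
 | |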
| 
 | |
| static void
 | |
| call_exc_trace(Py_tracefunc func, PyObject *self,
 | |
|                PyThreadState *tstate,
 | |
|                _PyInterpreterFrame *f)
 | |
| {
 | |
|     PyObject *type, *value, *traceback, *orig_traceback, *arg;
 | |
|     int err;
 | |
|     _PyErr_Fetch(tstate, &type, &value, &orig_traceback);
 | |
|     if (value == NULL) {
 | |
|         value = Py_NewRef(Py_None);
 | |
|     }
 | |
|     _PyErr_NormalizeException(tstate, &type, &value, &orig_traceback);
 | |
|     traceback = (orig_traceback != NULL) ? orig_traceback : Py_None;
 | |
|     arg = PyTuple_Pack(3, type, value, traceback);
 | |
|     if (arg == NULL) {
 | |
|         _PyErr_Restore(tstate, type, value, orig_traceback);
 | |
|         return;
 | |
|     }
 | |
|     err = call_trace(func, self, tstate, f, PyTrace_EXCEPTION, arg);
 | |
|     Py_DECREF(arg);
 | |
|     if (err == 0) {
 | |
|         _PyErr_Restore(tstate, type, value, orig_traceback);
 | |
|     }
 | |
|     else {
 | |
|         Py_XDECREF(type);
 | |
|         Py_XDECREF(value);
 | |
|         Py_XDECREF(orig_traceback);
 | |
|     }
 | |
| }
 | |
| 
 | |
| static int
 | |
| call_trace_protected(Py_tracefunc func, PyObject *obj,
 | |
|                      PyThreadState *tstate, _PyInterpreterFrame *frame,
 | |
|                      int what, PyObject *arg)
 | |
| {
 | |
|     PyObject *type, *value, *traceback;
 | |
|     int err;
 | |
|     _PyErr_Fetch(tstate, &type, &value, &traceback);
 | |
|     err = call_trace(func, obj, tstate, frame, what, arg);
 | |
|     if (err == 0)
 | |
|     {
 | |
|         _PyErr_Restore(tstate, type, value, traceback);
 | |
|         return 0;
 | |
|     }
 | |
|     else {
 | |
|         Py_XDECREF(type);
 | |
|         Py_XDECREF(value);
 | |
|         Py_XDECREF(traceback);
 | |
|         return -1;
 | |
|     }
 | |
| }
 | |
| 
 | |
| static void
 | |
| initialize_trace_info(PyTraceInfo *trace_info, _PyInterpreterFrame *frame)
 | |
| {
 | |
|     PyCodeObject *code = frame->f_code;
 | |
|     if (trace_info->code != code) {
 | |
|         trace_info->code = code;
 | |
|         _PyCode_InitAddressRange(code, &trace_info->bounds);
 | |
|     }
 | |
| }
 | |
| 
 | |
| void
 | |
| PyThreadState_EnterTracing(PyThreadState *tstate)
 | |
| {
 | |
|     tstate->tracing++;
 | |
|     tstate->cframe->use_tracing = 0;
 | |
| }
 | |
| 
 | |
| void
 | |
| PyThreadState_LeaveTracing(PyThreadState *tstate)
 | |
| {
 | |
|     assert(tstate->tracing > 0 && tstate->cframe->use_tracing == 0);
 | |
|     tstate->tracing--;
 | |
|     _PyThreadState_UpdateTracingState(tstate);
 | |
| }
 | |
| 
 | |
| static int
 | |
| call_trace(Py_tracefunc func, PyObject *obj,
 | |
|            PyThreadState *tstate, _PyInterpreterFrame *frame,
 | |
|            int what, PyObject *arg)
 | |
| {
 | |
|     int result;
 | |
|     if (tstate->tracing) {
 | |
|         return 0;
 | |
|     }
 | |
|     PyFrameObject *f = _PyFrame_GetFrameObject(frame);
 | |
|     if (f == NULL) {
 | |
|         return -1;
 | |
|     }
 | |
|     int old_what = tstate->tracing_what;
 | |
|     tstate->tracing_what = what;
 | |
|     PyThreadState_EnterTracing(tstate);
 | |
|     assert(_PyInterpreterFrame_LASTI(frame) >= 0);
 | |
|     if (_PyCode_InitLineArray(frame->f_code)) {
 | |
|         return -1;
 | |
|     }
 | |
|     f->f_lineno = _PyCode_LineNumberFromArray(frame->f_code, _PyInterpreterFrame_LASTI(frame));
 | |
|     result = func(obj, f, what, arg);
 | |
|     f->f_lineno = 0;
 | |
|     PyThreadState_LeaveTracing(tstate);
 | |
|     tstate->tracing_what = old_what;
 | |
|     return result;
 | |
| }
 | |
| 
 | |
| PyObject*
 | |
| _PyEval_CallTracing(PyObject *func, PyObject *args)
 | |
| {
 | |
|     // Save and disable tracing
 | |
|     PyThreadState *tstate = _PyThreadState_GET();
 | |
|     int save_tracing = tstate->tracing;
 | |
|     int save_use_tracing = tstate->cframe->use_tracing;
 | |
|     tstate->tracing = 0;
 | |
| 
 | |
|     // Call the tracing function
 | |
|     PyObject *result = PyObject_Call(func, args, NULL);
 | |
| 
 | |
|     // Restore tracing
 | |
|     tstate->tracing = save_tracing;
 | |
|     tstate->cframe->use_tracing = save_use_tracing;
 | |
|     return result;
 | |
| }
 | |
| 
 | |
| /* See Objects/lnotab_notes.txt for a description of how tracing works. */
 | |
| static int
 | |
| maybe_call_line_trace(Py_tracefunc func, PyObject *obj,
 | |
|                       PyThreadState *tstate, _PyInterpreterFrame *frame, int instr_prev)
 | |
| {
 | |
|     int result = 0;
 | |
| 
 | |
|     /* If the last instruction falls at the start of a line or if it
 | |
|        represents a jump backwards, update the frame's line number and
 | |
|        then call the trace function if we're tracing source lines.
 | |
|     */
 | |
|     if (_PyCode_InitLineArray(frame->f_code)) {
 | |
|         return -1;
 | |
|     }
 | |
|     int lastline;
 | |
|     if (instr_prev <= frame->f_code->_co_firsttraceable) {
 | |
|         lastline = -1;
 | |
|     }
 | |
|     else {
 | |
|         lastline = _PyCode_LineNumberFromArray(frame->f_code, instr_prev);
 | |
|     }
 | |
|     int line = _PyCode_LineNumberFromArray(frame->f_code, _PyInterpreterFrame_LASTI(frame));
 | |
|     PyFrameObject *f = _PyFrame_GetFrameObject(frame);
 | |
|     if (f == NULL) {
 | |
|         return -1;
 | |
|     }
 | |
|     if (line != -1 && f->f_trace_lines) {
 | |
|         /* Trace backward edges (except in 'yield from') or if line number has changed */
 | |
|         int trace = line != lastline ||
 | |
|             (_PyInterpreterFrame_LASTI(frame) < instr_prev &&
 | |
|              // SEND has no quickened forms, so no need to use _PyOpcode_Deopt
 | |
|              // here:
 | |
|              _Py_OPCODE(*frame->prev_instr) != SEND);
 | |
|         if (trace) {
 | |
|             result = call_trace(func, obj, tstate, frame, PyTrace_LINE, Py_None);
 | |
|         }
 | |
|     }
 | |
|     /* Always emit an opcode event if we're tracing all opcodes. */
 | |
|     if (f->f_trace_opcodes && result == 0) {
 | |
|         result = call_trace(func, obj, tstate, frame, PyTrace_OPCODE, Py_None);
 | |
|     }
 | |
|     return result;
 | |
| }
 | |
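| /*
 | |
|  * Example (added for illustration): with a trace function installed and
 | |
|  * f_trace_lines left at its default, the function above emits a PyTrace_LINE
 | |
|  * event when execution reaches a new source line or jumps backwards (e.g. at
 | |
|  * the end of each loop iteration), and a PyTrace_OPCODE event only if the
 | |
|  * frame's f_trace_opcodes has been enabled from Python code.
 | |
|  */
 | |
| 
 | |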
| 
 | |
| int
 | |
| _PyEval_SetProfile(PyThreadState *tstate, Py_tracefunc func, PyObject *arg)
 | |
| {
 | |
|     assert(is_tstate_valid(tstate));
 | |
|     /* The caller must hold the GIL */
 | |
|     assert(PyGILState_Check());
 | |
| 
 | |
|     /* Call _PySys_Audit() in the context of the current thread state,
 | |
|        even if tstate is not the current thread state. */
 | |
|     PyThreadState *current_tstate = _PyThreadState_GET();
 | |
|     if (_PySys_Audit(current_tstate, "sys.setprofile", NULL) < 0) {
 | |
|         return -1;
 | |
|     }
 | |
| 
 | |
|     tstate->c_profilefunc = func;
 | |
|     PyObject *old_profileobj = tstate->c_profileobj;
 | |
|     tstate->c_profileobj = Py_XNewRef(arg);
 | |
|     /* Flag that tracing or profiling is turned on */
 | |
|     _PyThreadState_UpdateTracingState(tstate);
 | |
| 
 | |
|     // gh-98257: Only call Py_XDECREF() once the new profile function is fully
 | |
|     // set, so it's safe to call sys.setprofile() again (reentrant call).
 | |
|     Py_XDECREF(old_profileobj);
 | |
| 
 | |
|     return 0;
 | |
| }
 | |
| 
 | |
| void
 | |
| PyEval_SetProfile(Py_tracefunc func, PyObject *arg)
 | |
| {
 | |
|     PyThreadState *tstate = _PyThreadState_GET();
 | |
|     if (_PyEval_SetProfile(tstate, func, arg) < 0) {
 | |
|         /* Log _PySys_Audit() error */
 | |
|         _PyErr_WriteUnraisableMsg("in PyEval_SetProfile", NULL);
 | |
|     }
 | |
| }
 | |
| 
 | |
| void
 | |
| PyEval_SetProfileAllThreads(Py_tracefunc func, PyObject *arg)
 | |
| {
 | |
|     PyThreadState *this_tstate = _PyThreadState_GET();
 | |
|     PyInterpreterState* interp = this_tstate->interp;
 | |
| 
 | |
|     _PyRuntimeState *runtime = &_PyRuntime;
 | |
|     HEAD_LOCK(runtime);
 | |
|     PyThreadState* ts = PyInterpreterState_ThreadHead(interp);
 | |
|     HEAD_UNLOCK(runtime);
 | |
| 
 | |
|     while (ts) {
 | |
|         if (_PyEval_SetProfile(ts, func, arg) < 0) {
 | |
|             _PyErr_WriteUnraisableMsg("in PyEval_SetProfileAllThreads", NULL);
 | |
|         }
 | |
|         HEAD_LOCK(runtime);
 | |
|         ts = PyThreadState_Next(ts);
 | |
|         HEAD_UNLOCK(runtime);
 | |
|     }
 | |
| }
 | |
| 
 | |
| int
 | |
| _PyEval_SetTrace(PyThreadState *tstate, Py_tracefunc func, PyObject *arg)
 | |
| {
 | |
|     assert(is_tstate_valid(tstate));
 | |
|     /* The caller must hold the GIL */
 | |
|     assert(PyGILState_Check());
 | |
| 
 | |
|     /* Call _PySys_Audit() in the context of the current thread state,
 | |
|        even if tstate is not the current thread state. */
 | |
|     PyThreadState *current_tstate = _PyThreadState_GET();
 | |
|     if (_PySys_Audit(current_tstate, "sys.settrace", NULL) < 0) {
 | |
|         return -1;
 | |
|     }
 | |
| 
 | |
|     tstate->c_tracefunc = func;
 | |
|     PyObject *old_traceobj = tstate->c_traceobj;
 | |
|     tstate->c_traceobj = Py_XNewRef(arg);
 | |
|     /* Flag that tracing or profiling is turned on */
 | |
|     _PyThreadState_UpdateTracingState(tstate);
 | |
| 
 | |
|     // gh-98257: Only call Py_XDECREF() once the new trace function is fully
 | |
|     // set, so it's safe to call sys.settrace() again (reentrant call).
 | |
|     Py_XDECREF(old_traceobj);
 | |
| 
 | |
|     return 0;
 | |
| }
 | |
| 
 | |
| void
 | |
| PyEval_SetTrace(Py_tracefunc func, PyObject *arg)
 | |
| {
 | |
|     PyThreadState *tstate = _PyThreadState_GET();
 | |
|     if (_PyEval_SetTrace(tstate, func, arg) < 0) {
 | |
|         /* Log _PySys_Audit() error */
 | |
|         _PyErr_WriteUnraisableMsg("in PyEval_SetTrace", NULL);
 | |
|     }
 | |
| }
 | |
| 
 | |
| void
 | |
| PyEval_SetTraceAllThreads(Py_tracefunc func, PyObject *arg)
 | |
| {
 | |
|     PyThreadState *this_tstate = _PyThreadState_GET();
 | |
|     PyInterpreterState* interp = this_tstate->interp;
 | |
| 
 | |
|     _PyRuntimeState *runtime = &_PyRuntime;
 | |
|     HEAD_LOCK(runtime);
 | |
|     PyThreadState* ts = PyInterpreterState_ThreadHead(interp);
 | |
|     HEAD_UNLOCK(runtime);
 | |
| 
 | |
|     while (ts) {
 | |
|         if (_PyEval_SetTrace(ts, func, arg) < 0) {
 | |
|             _PyErr_WriteUnraisableMsg("in PyEval_SetTraceAllThreads", NULL);
 | |
|         }
 | |
|         HEAD_LOCK(runtime);
 | |
|         ts = PyThreadState_Next(ts);
 | |
|         HEAD_UNLOCK(runtime);
 | |
|     }
 | |
| }
 | |
| 
 | |
| int
 | |
| _PyEval_SetCoroutineOriginTrackingDepth(int depth)
 | |
| {
 | |
|     PyThreadState *tstate = _PyThreadState_GET();
 | |
|     if (depth < 0) {
 | |
|         _PyErr_SetString(tstate, PyExc_ValueError, "depth must be >= 0");
 | |
|         return -1;
 | |
|     }
 | |
|     tstate->coroutine_origin_tracking_depth = depth;
 | |
|     return 0;
 | |
| }
 | |
| 
 | |
| 
 | |
| int
 | |
| _PyEval_GetCoroutineOriginTrackingDepth(void)
 | |
| {
 | |
|     PyThreadState *tstate = _PyThreadState_GET();
 | |
|     return tstate->coroutine_origin_tracking_depth;
 | |
| }
 | |
| 
 | |
| int
 | |
| _PyEval_SetAsyncGenFirstiter(PyObject *firstiter)
 | |
| {
 | |
|     PyThreadState *tstate = _PyThreadState_GET();
 | |
| 
 | |
|     if (_PySys_Audit(tstate, "sys.set_asyncgen_hook_firstiter", NULL) < 0) {
 | |
|         return -1;
 | |
|     }
 | |
| 
 | |
|     Py_XSETREF(tstate->async_gen_firstiter, Py_XNewRef(firstiter));
 | |
|     return 0;
 | |
| }
 | |
| 
 | |
| PyObject *
 | |
| _PyEval_GetAsyncGenFirstiter(void)
 | |
| {
 | |
|     PyThreadState *tstate = _PyThreadState_GET();
 | |
|     return tstate->async_gen_firstiter;
 | |
| }
 | |
| 
 | |
| int
 | |
| _PyEval_SetAsyncGenFinalizer(PyObject *finalizer)
 | |
| {
 | |
|     PyThreadState *tstate = _PyThreadState_GET();
 | |
| 
 | |
|     if (_PySys_Audit(tstate, "sys.set_asyncgen_hook_finalizer", NULL) < 0) {
 | |
|         return -1;
 | |
|     }
 | |
| 
 | |
|     Py_XSETREF(tstate->async_gen_finalizer, Py_XNewRef(finalizer));
 | |
|     return 0;
 | |
| }
 | |
| 
 | |
| PyObject *
 | |
| _PyEval_GetAsyncGenFinalizer(void)
 | |
| {
 | |
|     PyThreadState *tstate = _PyThreadState_GET();
 | |
|     return tstate->async_gen_finalizer;
 | |
| }
 | |
| 
 | |
| _PyInterpreterFrame *
 | |
| _PyEval_GetFrame(void)
 | |
| {
 | |
|     PyThreadState *tstate = _PyThreadState_GET();
 | |
|     return tstate->cframe->current_frame;
 | |
| }
 | |
| 
 | |
| PyFrameObject *
 | |
| PyEval_GetFrame(void)
 | |
| {
 | |
|     _PyInterpreterFrame *frame = _PyEval_GetFrame();
 | |
|     while (frame && _PyFrame_IsIncomplete(frame)) {
 | |
|         frame = frame->previous;
 | |
|     }
 | |
|     if (frame == NULL) {
 | |
|         return NULL;
 | |
|     }
 | |
|     PyFrameObject *f = _PyFrame_GetFrameObject(frame);
 | |
|     if (f == NULL) {
 | |
|         PyErr_Clear();
 | |
|     }
 | |
|     return f;
 | |
| }
 | |
| 
 | |
| PyObject *
 | |
| _PyEval_GetBuiltins(PyThreadState *tstate)
 | |
| {
 | |
|     _PyInterpreterFrame *frame = tstate->cframe->current_frame;
 | |
|     if (frame != NULL) {
 | |
|         return frame->f_builtins;
 | |
|     }
 | |
|     return tstate->interp->builtins;
 | |
| }
 | |
| 
 | |
| PyObject *
 | |
| PyEval_GetBuiltins(void)
 | |
| {
 | |
|     PyThreadState *tstate = _PyThreadState_GET();
 | |
|     return _PyEval_GetBuiltins(tstate);
 | |
| }
 | |
| 
 | |
| /* Convenience function to get a builtin from its name */
 | |
| PyObject *
 | |
| _PyEval_GetBuiltin(PyObject *name)
 | |
| {
 | |
|     PyThreadState *tstate = _PyThreadState_GET();
 | |
|     PyObject *attr = PyDict_GetItemWithError(PyEval_GetBuiltins(), name);
 | |
|     if (attr) {
 | |
|         Py_INCREF(attr);
 | |
|     }
 | |
|     else if (!_PyErr_Occurred(tstate)) {
 | |
|         _PyErr_SetObject(tstate, PyExc_AttributeError, name);
 | |
|     }
 | |
|     return attr;
 | |
| }
 | |
| 
 | |
| PyObject *
 | |
| _PyEval_GetBuiltinId(_Py_Identifier *name)
 | |
| {
 | |
|     return _PyEval_GetBuiltin(_PyUnicode_FromId(name));
 | |
| }
 | |
| 
 | |
| PyObject *
 | |
| PyEval_GetLocals(void)
 | |
| {
 | |
|     PyThreadState *tstate = _PyThreadState_GET();
 | |
|     _PyInterpreterFrame *current_frame = tstate->cframe->current_frame;
 | |
|     if (current_frame == NULL) {
 | |
|         _PyErr_SetString(tstate, PyExc_SystemError, "frame does not exist");
 | |
|         return NULL;
 | |
|     }
 | |
| 
 | |
|     if (_PyFrame_FastToLocalsWithError(current_frame) < 0) {
 | |
|         return NULL;
 | |
|     }
 | |
| 
 | |
|     PyObject *locals = current_frame->f_locals;
 | |
|     assert(locals != NULL);
 | |
|     return locals;
 | |
| }
 | |
| 
 | |
| PyObject *
 | |
| PyEval_GetGlobals(void)
 | |
| {
 | |
|     PyThreadState *tstate = _PyThreadState_GET();
 | |
|     _PyInterpreterFrame *current_frame = tstate->cframe->current_frame;
 | |
|     if (current_frame == NULL) {
 | |
|         return NULL;
 | |
|     }
 | |
|     return current_frame->f_globals;
 | |
| }
 | |
| 
 | |
| int
 | |
| PyEval_MergeCompilerFlags(PyCompilerFlags *cf)
 | |
| {
 | |
|     PyThreadState *tstate = _PyThreadState_GET();
 | |
|     _PyInterpreterFrame *current_frame = tstate->cframe->current_frame;
 | |
|     int result = cf->cf_flags != 0;
 | |
| 
 | |
|     if (current_frame != NULL) {
 | |
|         const int codeflags = current_frame->f_code->co_flags;
 | |
|         const int compilerflags = codeflags & PyCF_MASK;
 | |
|         if (compilerflags) {
 | |
|             result = 1;
 | |
|             cf->cf_flags |= compilerflags;
 | |
|         }
 | |
|     }
 | |
|     return result;
 | |
| }
 | |
| 
 | |
| 
 | |
| const char *
 | |
| PyEval_GetFuncName(PyObject *func)
 | |
| {
 | |
|     if (PyMethod_Check(func))
 | |
|         return PyEval_GetFuncName(PyMethod_GET_FUNCTION(func));
 | |
|     else if (PyFunction_Check(func))
 | |
|         return PyUnicode_AsUTF8(((PyFunctionObject*)func)->func_name);
 | |
|     else if (PyCFunction_Check(func))
 | |
|         return ((PyCFunctionObject*)func)->m_ml->ml_name;
 | |
|     else
 | |
|         return Py_TYPE(func)->tp_name;
 | |
| }
 | |
| 
 | |
| const char *
 | |
| PyEval_GetFuncDesc(PyObject *func)
 | |
| {
 | |
|     if (PyMethod_Check(func))
 | |
|         return "()";
 | |
|     else if (PyFunction_Check(func))
 | |
|         return "()";
 | |
|     else if (PyCFunction_Check(func))
 | |
|         return "()";
 | |
|     else
 | |
|         return " object";
 | |
| }
 | |

#define C_TRACE(x, call) \
if (use_tracing && tstate->c_profilefunc) { \
    if (call_trace(tstate->c_profilefunc, tstate->c_profileobj, \
        tstate, tstate->cframe->current_frame, \
        PyTrace_C_CALL, func)) { \
        x = NULL; \
    } \
    else { \
        x = call; \
        if (tstate->c_profilefunc != NULL) { \
            if (x == NULL) { \
                call_trace_protected(tstate->c_profilefunc, \
                    tstate->c_profileobj, \
                    tstate, tstate->cframe->current_frame, \
                    PyTrace_C_EXCEPTION, func); \
                /* XXX should pass (type, value, tb) */ \
            } else { \
                if (call_trace(tstate->c_profilefunc, \
                    tstate->c_profileobj, \
                    tstate, tstate->cframe->current_frame, \
                    PyTrace_C_RETURN, func)) { \
                    Py_DECREF(x); \
                    x = NULL; \
                } \
            } \
        } \
    } \
} else { \
    x = call; \
    }
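
/* C_TRACE only matters when a C-level profiler is installed
 * (tstate->c_profilefunc): it brackets the call with PyTrace_C_CALL and then
 * PyTrace_C_RETURN or PyTrace_C_EXCEPTION; with no profiler it reduces to a
 * plain `x = call`.  Roughly, this is what lets a profiler installed with
 * sys.setprofile() observe builtins, e.g. (illustrative Python):
 *
 *     import sys
 *     sys.setprofile(lambda frame, event, arg: print(event, arg))
 *     len([1, 2, 3])   # reports a 'c_call' for len, then a 'c_return'
 */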


static PyObject *
trace_call_function(PyThreadState *tstate,
                    PyObject *func,
                    PyObject **args, Py_ssize_t nargs,
                    PyObject *kwnames)
{
    int use_tracing = 1;
    PyObject *x;
    if (PyCFunction_CheckExact(func) || PyCMethod_CheckExact(func)) {
        C_TRACE(x, PyObject_Vectorcall(func, args, nargs, kwnames));
        return x;
    }
    else if (Py_IS_TYPE(func, &PyMethodDescr_Type) && nargs > 0) {
        /* We need to create a temporary bound method as argument
           for profiling.

           If nargs == 0, then this cannot work because we have no
           "self". In any case, the call itself would raise
           TypeError (foo needs an argument), so we just skip
           profiling. */
        PyObject *self = args[0];
        func = Py_TYPE(func)->tp_descr_get(func, self, (PyObject*)Py_TYPE(self));
        if (func == NULL) {
            return NULL;
        }
        C_TRACE(x, PyObject_Vectorcall(func,
                                        args+1, nargs-1,
                                        kwnames));
        Py_DECREF(func);
        return x;
    }
    return PyObject_Vectorcall(func, args, nargs | PY_VECTORCALL_ARGUMENTS_OFFSET, kwnames);
}

static PyObject *
do_call_core(PyThreadState *tstate,
             PyObject *func,
             PyObject *callargs,
             PyObject *kwdict,
             int use_tracing
            )
{
    PyObject *result;
    if (PyCFunction_CheckExact(func) || PyCMethod_CheckExact(func)) {
        C_TRACE(result, PyObject_Call(func, callargs, kwdict));
        return result;
    }
    else if (Py_IS_TYPE(func, &PyMethodDescr_Type)) {
        Py_ssize_t nargs = PyTuple_GET_SIZE(callargs);
        if (nargs > 0 && use_tracing) {
            /* We need to create a temporary bound method as argument
               for profiling.

               If nargs == 0, then this cannot work because we have no
               "self". In any case, the call itself would raise
               TypeError (foo needs an argument), so we just skip
               profiling. */
            PyObject *self = PyTuple_GET_ITEM(callargs, 0);
            func = Py_TYPE(func)->tp_descr_get(func, self, (PyObject*)Py_TYPE(self));
            if (func == NULL) {
                return NULL;
            }

            C_TRACE(result, _PyObject_FastCallDictTstate(
                                    tstate, func,
                                    &_PyTuple_ITEMS(callargs)[1],
                                    nargs - 1,
                                    kwdict));
            Py_DECREF(func);
            return result;
        }
    }
    EVAL_CALL_STAT_INC_IF_FUNCTION(EVAL_CALL_FUNCTION_EX, func);
    return PyObject_Call(func, callargs, kwdict);
}
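
/* A rough illustration: do_call_core() is the fully general call path used
 * by the CALL_FUNCTION_EX instruction, i.e. calls that unpack their
 * arguments at run time:
 *
 *     def wrapper(*args, **kwargs):
 *         return target(*args, **kwargs)   # CALL_FUNCTION_EX
 *
 * The PyCFunction/PyMethodDescr branches above exist only so that an active
 * C profiler still sees C_CALL/C_RETURN events on this path; everything
 * else funnels into PyObject_Call().
 */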

/* Extract a slice index from a PyLong or an object with the
   nb_index slot defined, and store in *pi.
   Silently reduce values larger than PY_SSIZE_T_MAX to PY_SSIZE_T_MAX,
   and silently boost values less than PY_SSIZE_T_MIN to PY_SSIZE_T_MIN.
   Return 0 on error, 1 on success.
*/
int
_PyEval_SliceIndex(PyObject *v, Py_ssize_t *pi)
{
    PyThreadState *tstate = _PyThreadState_GET();
    if (!Py_IsNone(v)) {
        Py_ssize_t x;
        if (_PyIndex_Check(v)) {
            x = PyNumber_AsSsize_t(v, NULL);
            if (x == -1 && _PyErr_Occurred(tstate))
                return 0;
        }
        else {
            _PyErr_SetString(tstate, PyExc_TypeError,
                             "slice indices must be integers or "
                             "None or have an __index__ method");
            return 0;
        }
        *pi = x;
    }
    return 1;
}

int
_PyEval_SliceIndexNotNone(PyObject *v, Py_ssize_t *pi)
{
    PyThreadState *tstate = _PyThreadState_GET();
    Py_ssize_t x;
    if (_PyIndex_Check(v)) {
        x = PyNumber_AsSsize_t(v, NULL);
        if (x == -1 && _PyErr_Occurred(tstate))
            return 0;
    }
    else {
        _PyErr_SetString(tstate, PyExc_TypeError,
                         "slice indices must be integers or "
                         "have an __index__ method");
        return 0;
    }
    *pi = x;
    return 1;
}
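
/* A rough illustration of the clamping described above: because
 * PyNumber_AsSsize_t() is called with a NULL exception argument, an
 * out-of-range index is clamped instead of raising OverflowError, which is
 * what makes over-large slice bounds work at the Python level:
 *
 *     "abc"[0:10**100]    # -> "abc"; stop clamped to PY_SSIZE_T_MAX
 *     "abc"[-10**100:2]   # -> "ab";  start clamped to PY_SSIZE_T_MIN
 */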

static PyObject *
import_name(PyThreadState *tstate, _PyInterpreterFrame *frame,
            PyObject *name, PyObject *fromlist, PyObject *level)
{
    PyObject *import_func, *res;
    PyObject* stack[5];

    import_func = _PyDict_GetItemWithError(frame->f_builtins, &_Py_ID(__import__));
    if (import_func == NULL) {
        if (!_PyErr_Occurred(tstate)) {
            _PyErr_SetString(tstate, PyExc_ImportError, "__import__ not found");
        }
        return NULL;
    }
    PyObject *locals = frame->f_locals;
    /* Fast path for not overloaded __import__. */
    if (import_func == tstate->interp->import_func) {
        int ilevel = _PyLong_AsInt(level);
        if (ilevel == -1 && _PyErr_Occurred(tstate)) {
            return NULL;
        }
        res = PyImport_ImportModuleLevelObject(
                        name,
                        frame->f_globals,
                        locals == NULL ? Py_None : locals,
                        fromlist,
                        ilevel);
        return res;
    }

    Py_INCREF(import_func);

    stack[0] = name;
    stack[1] = frame->f_globals;
    stack[2] = locals == NULL ? Py_None : locals;
    stack[3] = fromlist;
    stack[4] = level;
    res = _PyObject_FastCall(import_func, stack, 5);
    Py_DECREF(import_func);
    return res;
}
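
/* A rough illustration: import_name() backs the IMPORT_NAME instruction, so
 * both of these statements reach it (with different fromlist/level args):
 *
 *     import os.path           # fromlist = None,          level = 0
 *     from . import sibling    # fromlist = ('sibling',),  level = 1
 *
 * Replacing builtins.__import__ makes import_func differ from
 * interp->import_func, which disables the fast path and routes every import
 * through the Python-visible override instead.
 */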

static PyObject *
import_from(PyThreadState *tstate, PyObject *v, PyObject *name)
{
    PyObject *x;
    PyObject *fullmodname, *pkgname, *pkgpath, *pkgname_or_unknown, *errmsg;

    if (_PyObject_LookupAttr(v, name, &x) != 0) {
        return x;
    }
    /* Issue #17636: in case this failed because of a circular relative
       import, try to fall back on reading the module directly from
       sys.modules. */
    pkgname = PyObject_GetAttr(v, &_Py_ID(__name__));
    if (pkgname == NULL) {
        goto error;
    }
    if (!PyUnicode_Check(pkgname)) {
        Py_CLEAR(pkgname);
        goto error;
    }
    fullmodname = PyUnicode_FromFormat("%U.%U", pkgname, name);
    if (fullmodname == NULL) {
        Py_DECREF(pkgname);
        return NULL;
    }
    x = PyImport_GetModule(fullmodname);
    Py_DECREF(fullmodname);
    if (x == NULL && !_PyErr_Occurred(tstate)) {
        goto error;
    }
    Py_DECREF(pkgname);
    return x;
 error:
    pkgpath = PyModule_GetFilenameObject(v);
    if (pkgname == NULL) {
        pkgname_or_unknown = PyUnicode_FromString("<unknown module name>");
        if (pkgname_or_unknown == NULL) {
            Py_XDECREF(pkgpath);
            return NULL;
        }
    } else {
        pkgname_or_unknown = pkgname;
    }

    if (pkgpath == NULL || !PyUnicode_Check(pkgpath)) {
        _PyErr_Clear(tstate);
        errmsg = PyUnicode_FromFormat(
            "cannot import name %R from %R (unknown location)",
            name, pkgname_or_unknown
        );
        /* NULL checks for errmsg and pkgname done by PyErr_SetImportError. */
        _PyErr_SetImportErrorWithNameFrom(errmsg, pkgname, NULL, name);
    }
    else {
        PyObject *spec = PyObject_GetAttr(v, &_Py_ID(__spec__));
        const char *fmt =
            _PyModuleSpec_IsInitializing(spec) ?
            "cannot import name %R from partially initialized module %R "
            "(most likely due to a circular import) (%S)" :
            "cannot import name %R from %R (%S)";
        Py_XDECREF(spec);

        errmsg = PyUnicode_FromFormat(fmt, name, pkgname_or_unknown, pkgpath);
        /* NULL checks for errmsg and pkgname done by PyErr_SetImportError. */
        _PyErr_SetImportErrorWithNameFrom(errmsg, pkgname, pkgpath, name);
    }

    Py_XDECREF(errmsg);
    Py_XDECREF(pkgname_or_unknown);
    Py_XDECREF(pkgpath);
    return NULL;
}
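
/* A rough illustration of the sys.modules fallback above: during a circular
 * "from" import the submodule may not yet be an attribute of its package,
 * even though it already exists in sys.modules:
 *
 *     # pkg/a.py
 *     from pkg import b    # pkg.b not bound yet; sys.modules['pkg.b'] is
 *
 * If both the attribute lookup and the sys.modules lookup fail, the
 * "partially initialized module ... circular import" message built above is
 * what the user ends up seeing.
 */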

static int
import_all_from(PyThreadState *tstate, PyObject *locals, PyObject *v)
{
    PyObject *all, *dict, *name, *value;
    int skip_leading_underscores = 0;
    int pos, err;

    if (_PyObject_LookupAttr(v, &_Py_ID(__all__), &all) < 0) {
        return -1; /* Unexpected error */
    }
    if (all == NULL) {
        if (_PyObject_LookupAttr(v, &_Py_ID(__dict__), &dict) < 0) {
            return -1;
        }
        if (dict == NULL) {
            _PyErr_SetString(tstate, PyExc_ImportError,
                    "from-import-* object has no __dict__ and no __all__");
            return -1;
        }
        all = PyMapping_Keys(dict);
        Py_DECREF(dict);
        if (all == NULL)
            return -1;
        skip_leading_underscores = 1;
    }

    for (pos = 0, err = 0; ; pos++) {
        name = PySequence_GetItem(all, pos);
        if (name == NULL) {
            if (!_PyErr_ExceptionMatches(tstate, PyExc_IndexError)) {
                err = -1;
            }
            else {
                _PyErr_Clear(tstate);
            }
            break;
        }
        if (!PyUnicode_Check(name)) {
            PyObject *modname = PyObject_GetAttr(v, &_Py_ID(__name__));
            if (modname == NULL) {
                Py_DECREF(name);
                err = -1;
                break;
            }
            if (!PyUnicode_Check(modname)) {
                _PyErr_Format(tstate, PyExc_TypeError,
                              "module __name__ must be a string, not %.100s",
                              Py_TYPE(modname)->tp_name);
            }
            else {
                _PyErr_Format(tstate, PyExc_TypeError,
                              "%s in %U.%s must be str, not %.100s",
                              skip_leading_underscores ? "Key" : "Item",
                              modname,
                              skip_leading_underscores ? "__dict__" : "__all__",
                              Py_TYPE(name)->tp_name);
            }
            Py_DECREF(modname);
            Py_DECREF(name);
            err = -1;
            break;
        }
        if (skip_leading_underscores) {
            if (PyUnicode_READY(name) == -1) {
                Py_DECREF(name);
                err = -1;
                break;
            }
            if (PyUnicode_READ_CHAR(name, 0) == '_') {
                Py_DECREF(name);
                continue;
            }
        }
        value = PyObject_GetAttr(v, name);
        if (value == NULL)
            err = -1;
        else if (PyDict_CheckExact(locals))
            err = PyDict_SetItem(locals, name, value);
        else
            err = PyObject_SetItem(locals, name, value);
        Py_DECREF(name);
        Py_XDECREF(value);
        if (err != 0)
            break;
    }
    Py_DECREF(all);
    return err;
}
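
/* A rough illustration of the policy implemented here for
 *
 *     from mod import *
 *
 * the names listed in mod.__all__ are copied into the caller's namespace;
 * if __all__ is missing, the keys of mod.__dict__ are used instead and,
 * only in that fallback case, names starting with an underscore are skipped.
 */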

#define CANNOT_CATCH_MSG "catching classes that do not inherit from "\
                         "BaseException is not allowed"

#define CANNOT_EXCEPT_STAR_EG "catching ExceptionGroup with except* "\
                              "is not allowed. Use except instead."

static int
check_except_type_valid(PyThreadState *tstate, PyObject* right)
{
    if (PyTuple_Check(right)) {
        Py_ssize_t i, length;
        length = PyTuple_GET_SIZE(right);
        for (i = 0; i < length; i++) {
            PyObject *exc = PyTuple_GET_ITEM(right, i);
            if (!PyExceptionClass_Check(exc)) {
                _PyErr_SetString(tstate, PyExc_TypeError,
                    CANNOT_CATCH_MSG);
                return -1;
            }
        }
    }
    else {
        if (!PyExceptionClass_Check(right)) {
            _PyErr_SetString(tstate, PyExc_TypeError,
                CANNOT_CATCH_MSG);
            return -1;
        }
    }
    return 0;
}

static int
check_except_star_type_valid(PyThreadState *tstate, PyObject* right)
{
    if (check_except_type_valid(tstate, right) < 0) {
        return -1;
    }

    /* reject except *ExceptionGroup */

    int is_subclass = 0;
    if (PyTuple_Check(right)) {
        Py_ssize_t length = PyTuple_GET_SIZE(right);
        for (Py_ssize_t i = 0; i < length; i++) {
            PyObject *exc = PyTuple_GET_ITEM(right, i);
            is_subclass = PyObject_IsSubclass(exc, PyExc_BaseExceptionGroup);
            if (is_subclass < 0) {
                return -1;
            }
            if (is_subclass) {
                break;
            }
        }
    }
    else {
        is_subclass = PyObject_IsSubclass(right, PyExc_BaseExceptionGroup);
        if (is_subclass < 0) {
            return -1;
        }
    }
    if (is_subclass) {
        _PyErr_SetString(tstate, PyExc_TypeError,
            CANNOT_EXCEPT_STAR_EG);
        return -1;
    }
    return 0;
}
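
/* A rough illustration of what these checks reject once an exception is
 * actually being matched against a handler:
 *
 *     try:
 *         raise ValueError
 *     except 42:                    # TypeError: CANNOT_CATCH_MSG
 *         ...
 *
 *     try:
 *         raise ValueError
 *     except* ExceptionGroup:       # TypeError: CANNOT_EXCEPT_STAR_EG
 *         ...
 *
 * A plain `except ExceptionGroup:` remains legal; only except* refuses it.
 */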

static int
check_args_iterable(PyThreadState *tstate, PyObject *func, PyObject *args)
{
    if (Py_TYPE(args)->tp_iter == NULL && !PySequence_Check(args)) {
        /* check_args_iterable() may be called with a live exception:
         * clear it to prevent calling _PyObject_FunctionStr() with an
         * exception set. */
        _PyErr_Clear(tstate);
        PyObject *funcstr = _PyObject_FunctionStr(func);
        if (funcstr != NULL) {
            _PyErr_Format(tstate, PyExc_TypeError,
                          "%U argument after * must be an iterable, not %.200s",
                          funcstr, Py_TYPE(args)->tp_name);
            Py_DECREF(funcstr);
        }
        return -1;
    }
    return 0;
}

static void
format_kwargs_error(PyThreadState *tstate, PyObject *func, PyObject *kwargs)
{
    /* _PyDict_MergeEx raises attribute
     * error (percolated from an attempt
     * to get 'keys' attribute) instead of
     * a type error if its second argument
     * is not a mapping.
     */
    if (_PyErr_ExceptionMatches(tstate, PyExc_AttributeError)) {
        _PyErr_Clear(tstate);
        PyObject *funcstr = _PyObject_FunctionStr(func);
        if (funcstr != NULL) {
            _PyErr_Format(
                tstate, PyExc_TypeError,
                "%U argument after ** must be a mapping, not %.200s",
                funcstr, Py_TYPE(kwargs)->tp_name);
            Py_DECREF(funcstr);
        }
    }
    else if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) {
        PyObject *exc, *val, *tb;
        _PyErr_Fetch(tstate, &exc, &val, &tb);
        if (val && PyTuple_Check(val) && PyTuple_GET_SIZE(val) == 1) {
            _PyErr_Clear(tstate);
            PyObject *funcstr = _PyObject_FunctionStr(func);
            if (funcstr != NULL) {
                PyObject *key = PyTuple_GET_ITEM(val, 0);
                _PyErr_Format(
                    tstate, PyExc_TypeError,
                    "%U got multiple values for keyword argument '%S'",
                    funcstr, key);
                Py_DECREF(funcstr);
            }
            Py_XDECREF(exc);
            Py_XDECREF(val);
            Py_XDECREF(tb);
        }
        else {
            _PyErr_Restore(tstate, exc, val, tb);
        }
    }
}
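
/* A rough illustration of the two messages produced here:
 *
 *     def f(**kw): ...
 *     f(**[1, 2])           # TypeError: f() argument after ** must be a mapping, not list
 *     f(x=1, **{'x': 2})    # TypeError: f() got multiple values for keyword argument 'x'
 */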

static void
format_exc_check_arg(PyThreadState *tstate, PyObject *exc,
                     const char *format_str, PyObject *obj)
{
    const char *obj_str;

    if (!obj)
        return;

    obj_str = PyUnicode_AsUTF8(obj);
    if (!obj_str)
        return;

    _PyErr_Format(tstate, exc, format_str, obj_str);

    if (exc == PyExc_NameError) {
        // Include the name in the NameError exceptions to offer suggestions later.
        PyObject *type, *value, *traceback;
        PyErr_Fetch(&type, &value, &traceback);
        PyErr_NormalizeException(&type, &value, &traceback);
        if (PyErr_GivenExceptionMatches(value, PyExc_NameError)) {
            PyNameErrorObject* exc = (PyNameErrorObject*) value;
            if (exc->name == NULL) {
                // We do not care if this fails because we are going to restore the
                // NameError anyway.
                (void)PyObject_SetAttr(value, &_Py_ID(name), obj);
            }
        }
        PyErr_Restore(type, value, traceback);
    }
}

static void
format_exc_unbound(PyThreadState *tstate, PyCodeObject *co, int oparg)
{
    PyObject *name;
    /* Don't stomp existing exception */
    if (_PyErr_Occurred(tstate))
        return;
    name = PyTuple_GET_ITEM(co->co_localsplusnames, oparg);
    if (oparg < co->co_nplaincellvars + co->co_nlocals) {
        format_exc_check_arg(tstate, PyExc_UnboundLocalError,
                             UNBOUNDLOCAL_ERROR_MSG, name);
    } else {
        format_exc_check_arg(tstate, PyExc_NameError,
                             UNBOUNDFREE_ERROR_MSG, name);
    }
}

static void
format_awaitable_error(PyThreadState *tstate, PyTypeObject *type, int oparg)
{
    if (type->tp_as_async == NULL || type->tp_as_async->am_await == NULL) {
        if (oparg == 1) {
            _PyErr_Format(tstate, PyExc_TypeError,
                          "'async with' received an object from __aenter__ "
                          "that does not implement __await__: %.100s",
                          type->tp_name);
        }
        else if (oparg == 2) {
            _PyErr_Format(tstate, PyExc_TypeError,
                          "'async with' received an object from __aexit__ "
                          "that does not implement __await__: %.100s",
                          type->tp_name);
        }
    }
}
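
/* A rough illustration of the oparg == 1 case above: __aenter__ returned
 * something that is not awaitable.
 *
 *     class CM:
 *         def __aenter__(self): return 1          # not awaitable
 *         async def __aexit__(self, *exc): return False
 *
 *     async def f():
 *         async with CM():                        # TypeError from this helper
 *             pass
 */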


Py_ssize_t
_PyEval_RequestCodeExtraIndex(freefunc free)
{
    PyInterpreterState *interp = _PyInterpreterState_GET();
    Py_ssize_t new_index;

    if (interp->co_extra_user_count == MAX_CO_EXTRA_USERS - 1) {
        return -1;
    }
    new_index = interp->co_extra_user_count++;
    interp->co_extra_freefuncs[new_index] = free;
    return new_index;
}
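
#if 0
/* A minimal sketch of the PEP 523 co_extra protocol this function serves:
 * reserve an index once, then attach per-code-object data that is released
 * with the registered freefunc.  Names such as extra_index and
 * attach_counter are illustrative, not real APIs. */
static Py_ssize_t extra_index = -1;

static int
attach_counter(PyCodeObject *code)
{
    if (extra_index < 0) {
        extra_index = _PyEval_RequestCodeExtraIndex(PyMem_Free);
        if (extra_index < 0) {
            return -1;              /* no free slots (MAX_CO_EXTRA_USERS) */
        }
    }
    long *counter = PyMem_Malloc(sizeof(long));
    if (counter == NULL) {
        return -1;
    }
    *counter = 0;
    return _PyCode_SetExtra((PyObject *)code, extra_index, counter);
}
#endif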

static void
dtrace_function_entry(_PyInterpreterFrame *frame)
{
    const char *filename;
    const char *funcname;
    int lineno;

    PyCodeObject *code = frame->f_code;
    filename = PyUnicode_AsUTF8(code->co_filename);
    funcname = PyUnicode_AsUTF8(code->co_name);
    lineno = _PyInterpreterFrame_GetLine(frame);

    PyDTrace_FUNCTION_ENTRY(filename, funcname, lineno);
}

static void
dtrace_function_return(_PyInterpreterFrame *frame)
{
    const char *filename;
    const char *funcname;
    int lineno;

    PyCodeObject *code = frame->f_code;
    filename = PyUnicode_AsUTF8(code->co_filename);
    funcname = PyUnicode_AsUTF8(code->co_name);
    lineno = _PyInterpreterFrame_GetLine(frame);

    PyDTrace_FUNCTION_RETURN(filename, funcname, lineno);
}

/* DTrace equivalent of maybe_call_line_trace. */
static void
maybe_dtrace_line(_PyInterpreterFrame *frame,
                  PyTraceInfo *trace_info,
                  int instr_prev)
{
    const char *co_filename, *co_name;

    /* If the last instruction executed isn't in the current
       instruction window, reset the window.
    */
    initialize_trace_info(trace_info, frame);
    int lastline = _PyCode_CheckLineNumber(instr_prev*sizeof(_Py_CODEUNIT), &trace_info->bounds);
    int addr = _PyInterpreterFrame_LASTI(frame) * sizeof(_Py_CODEUNIT);
    int line = _PyCode_CheckLineNumber(addr, &trace_info->bounds);
    if (line != -1) {
        /* Trace backward edges or first instruction of a new line */
        if (_PyInterpreterFrame_LASTI(frame) < instr_prev ||
            (line != lastline && addr == trace_info->bounds.ar_start))
        {
            co_filename = PyUnicode_AsUTF8(frame->f_code->co_filename);
            if (!co_filename) {
                co_filename = "?";
            }
            co_name = PyUnicode_AsUTF8(frame->f_code->co_name);
            if (!co_name) {
                co_name = "?";
            }
            PyDTrace_LINE(co_filename, co_name, line);
        }
    }
}

/* Implement Py_EnterRecursiveCall() and Py_LeaveRecursiveCall() as functions
   for the limited API. */

#undef Py_EnterRecursiveCall

int Py_EnterRecursiveCall(const char *where)
{
    return _Py_EnterRecursiveCall(where);
}

#undef Py_LeaveRecursiveCall

void Py_LeaveRecursiveCall(void)
{
    _Py_LeaveRecursiveCall();
}
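
#if 0
/* A minimal sketch of how an extension uses this pair to guard a recursive
 * C helper against overflowing the C stack (walk_children() is hypothetical). */
static PyObject *
walk(PyObject *node)
{
    if (Py_EnterRecursiveCall(" while walking a node tree")) {
        return NULL;                /* RecursionError already set */
    }
    PyObject *result = walk_children(node);
    Py_LeaveRecursiveCall();
    return result;
}
#endif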