2020-06-22 17:27:35 +02:00
|
|
|
#ifndef Py_INTERNAL_TUPLE_H
|
|
|
|
|
#define Py_INTERNAL_TUPLE_H
|
2018-11-25 23:56:17 +01:00
|
|
|
#ifdef __cplusplus
|
|
|
|
|
extern "C" {
|
|
|
|
|
#endif
|
|
|
|
|
|
2019-04-17 23:02:26 +02:00
|
|
|
#ifndef Py_BUILD_CORE
|
|
|
|
|
# error "this header requires Py_BUILD_CORE define"
|
2018-11-25 23:56:17 +01:00
|
|
|
#endif
|
|
|
|
|
|
2025-03-27 09:57:06 -04:00
|
|
|
#include "pycore_object.h" // _PyObject_GC_IS_TRACKED
|
2025-03-19 17:33:24 +01:00
|
|
|
#include "pycore_structs.h" // _PyStackRef
|
|
|
|
|
|
/* Possibly remove a tuple from GC tracking.
   NOTE(review): presumably untracks tuples whose items can never create a
   reference cycle — confirm against Objects/tupleobject.c. */
extern void _PyTuple_MaybeUntrack(PyObject *);

/* Write tuple allocator/free-list statistics to `out` (debug builds). */
extern void _PyTuple_DebugMallocStats(FILE *out);
|
/* runtime lifecycle */

/* Initialize the per-interpreter global tuple objects during startup.
   NOTE(review): likely creates/initializes the empty-tuple singleton and any
   cached tuples — confirm in Objects/tupleobject.c.  Returns a PyStatus so
   interpreter init can report failure. */
extern PyStatus _PyTuple_InitGlobalObjects(PyInterpreterState *);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/* other API */

/* Direct (unchecked) access to a tuple's item array as an rvalue. */
#define _PyTuple_ITEMS(op) _Py_RVALUE(_PyTuple_CAST(op)->ob_item)

/* Build a new tuple from an array of `n` object pointers.
   Does not steal the callers' references. */
PyAPI_FUNC(PyObject *)_PyTuple_FromArray(PyObject *const *, Py_ssize_t);

/* Build a new tuple from an array of stack references.
   NOTE(review): name suggests the refs are stolen only if allocation
   succeeds — confirm at the definition site. */
PyAPI_FUNC(PyObject *)_PyTuple_FromStackRefStealOnSuccess(const union _PyStackRef *, Py_ssize_t);

/* Build a new tuple from an array of object pointers, stealing the
   references (per the "Steal" naming convention). */
PyAPI_FUNC(PyObject *)_PyTuple_FromArraySteal(PyObject *const *, Py_ssize_t);
|
2018-11-25 23:56:17 +01:00
|
|
|
|
/* State of a tuple iterator.  Field order is part of the object layout;
   do not reorder. */
typedef struct {
    PyObject_HEAD
    Py_ssize_t it_index;    /* current iteration position in it_seq */
    PyTupleObject *it_seq; /* Set to NULL when iterator is exhausted */
} _PyTupleIterObject;
|
|
|
|
|
|
/* Invalidate a tuple's cached hash value (ob_hash == -1 means "not yet
   computed").  Must be used whenever a tuple's contents may change, so a
   stale hash is never observed.  `op` must not be NULL.
   The macro parameter is parenthesized at each use per standard macro
   hygiene (CERT PRE01-C). */
#define _PyTuple_RESET_HASH_CACHE(op) \
    do { \
        assert((op) != NULL); \
        _PyTuple_CAST(op)->ob_hash = -1; \
    } while (0)
|
|
|
|
|
|
|
|
|
|
/* bpo-42536: If reusing a tuple object, this should be called to re-track it
|
|
|
|
|
with the garbage collector and reset its hash cache. */
|
|
|
|
|
static inline void
|
|
|
|
|
_PyTuple_Recycle(PyObject *op)
|
|
|
|
|
{
|
|
|
|
|
_PyTuple_RESET_HASH_CACHE(op);
|
|
|
|
|
if (!_PyObject_GC_IS_TRACKED(op)) {
|
|
|
|
|
_PyObject_GC_TRACK(op);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/* Below are the official constants from the xxHash specification. Optimizing
   compilers should emit a single "rotate" instruction for the
   _PyTuple_HASH_XXROTATE() expansion. If that doesn't happen for some important
   platform, the macro could be changed to expand to a platform-specific rotate
   spelling instead.
*/
#if SIZEOF_PY_UHASH_T > 4
/* 64-bit Py_uhash_t: XXH64 primes and rotate width. */
#define _PyTuple_HASH_XXPRIME_1 ((Py_uhash_t)11400714785074694791ULL)
#define _PyTuple_HASH_XXPRIME_2 ((Py_uhash_t)14029467366897019727ULL)
#define _PyTuple_HASH_XXPRIME_5 ((Py_uhash_t)2870177450012600261ULL)
#define _PyTuple_HASH_XXROTATE(x) ((x << 31) | (x >> 33)) /* Rotate left 31 bits */
#else
/* 32-bit Py_uhash_t: XXH32 primes and rotate width. */
#define _PyTuple_HASH_XXPRIME_1 ((Py_uhash_t)2654435761UL)
#define _PyTuple_HASH_XXPRIME_2 ((Py_uhash_t)2246822519UL)
#define _PyTuple_HASH_XXPRIME_5 ((Py_uhash_t)374761393UL)
#define _PyTuple_HASH_XXROTATE(x) ((x << 13) | (x >> 19)) /* Rotate left 13 bits */
#endif

/* Precomputed hash of the zero-length tuple.
   NOTE(review): must stay in sync with the tuple hashing loop in
   Objects/tupleobject.c — confirm when changing either. */
#define _PyTuple_HASH_EMPTY (_PyTuple_HASH_XXPRIME_5 + (_PyTuple_HASH_XXPRIME_5 ^ 3527539UL))
|
|
|
|
|
|
2018-11-25 23:56:17 +01:00
|
|
|
#ifdef __cplusplus
|
|
|
|
|
}
|
|
|
|
|
#endif
|
2020-06-22 17:27:35 +02:00
|
|
|
#endif /* !Py_INTERNAL_TUPLE_H */
|