diff --git a/Grammar/python.gram b/Grammar/python.gram index b9ecd2273a5..718caf8a905 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -121,9 +121,9 @@ simple_stmts[asdl_stmt_seq*]: simple_stmt[stmt_ty] (memo): | assignment | &"type" type_alias + | &('import' | 'from' | "lazy" ) import_stmt | e=star_expressions { _PyAST_Expr(e, EXTRA) } | &'return' return_stmt - | &('import' | 'from') import_stmt | &'raise' raise_stmt | &'pass' pass_stmt | &'del' del_stmt @@ -216,7 +216,7 @@ assert_stmt[stmt_ty]: | invalid_assert_stmt | 'assert' a=expression b=[',' z=expression { z }] { _PyAST_Assert(a, b, EXTRA) } -import_stmt[stmt_ty]: +import_stmt[stmt_ty](memo): | invalid_import | import_name | import_from @@ -224,13 +224,16 @@ import_stmt[stmt_ty]: # Import statements # ----------------- -import_name[stmt_ty]: 'import' a=dotted_as_names { _PyAST_Import(a, EXTRA) } +import_name[stmt_ty]: + | 'import' a=dotted_as_names { _PyAST_Import(a, 0, EXTRA) } + | "lazy" 'import' a=dotted_as_names { _PyAST_Import(a, 1, EXTRA) } + # note below: the ('.' | '...') is necessary because '...' is tokenized as ELLIPSIS import_from[stmt_ty]: - | 'from' a=('.' | '...')* b=dotted_name 'import' c=import_from_targets { - _PyPegen_checked_future_import(p, b->v.Name.id, c, _PyPegen_seq_count_dots(a), EXTRA) } - | 'from' a=('.' | '...')+ 'import' b=import_from_targets { - _PyAST_ImportFrom(NULL, b, _PyPegen_seq_count_dots(a), EXTRA) } + | lazy="lazy"? 'from' a=('.' | '...')* b=dotted_name 'import' c=import_from_targets { + _PyPegen_checked_future_import(p, b->v.Name.id, c, _PyPegen_seq_count_dots(a), lazy ? 1 : 0, EXTRA) } + | lazy="lazy"? 'from' a=('.' | '...')+ 'import' b=import_from_targets { + _PyAST_ImportFrom(NULL, b, _PyPegen_seq_count_dots(a), lazy ? 1 : 0, EXTRA) } import_from_targets[asdl_alias_seq*]: | '(' a=import_from_as_names [','] ')' { a } | import_from_as_names !',' diff --git a/Include/internal/pycore_ast.h b/Include/internal/pycore_ast.h index 60367202bab..b47398669bb 100644 --- a/Include/internal/pycore_ast.h +++ b/Include/internal/pycore_ast.h @@ -329,12 +329,14 @@ struct _stmt { struct { asdl_alias_seq *names; + int is_lazy; } Import; struct { identifier module; asdl_alias_seq *names; int level; + int is_lazy; } ImportFrom; struct { @@ -764,11 +766,12 @@ stmt_ty _PyAST_TryStar(asdl_stmt_seq * body, asdl_excepthandler_seq * handlers, end_col_offset, PyArena *arena); stmt_ty _PyAST_Assert(expr_ty test, expr_ty msg, int lineno, int col_offset, int end_lineno, int end_col_offset, PyArena *arena); -stmt_ty _PyAST_Import(asdl_alias_seq * names, int lineno, int col_offset, int - end_lineno, int end_col_offset, PyArena *arena); +stmt_ty _PyAST_Import(asdl_alias_seq * names, int is_lazy, int lineno, int + col_offset, int end_lineno, int end_col_offset, PyArena + *arena); stmt_ty _PyAST_ImportFrom(identifier module, asdl_alias_seq * names, int level, - int lineno, int col_offset, int end_lineno, int - end_col_offset, PyArena *arena); + int is_lazy, int lineno, int col_offset, int + end_lineno, int end_col_offset, PyArena *arena); stmt_ty _PyAST_Global(asdl_identifier_seq * names, int lineno, int col_offset, int end_lineno, int end_col_offset, PyArena *arena); stmt_ty _PyAST_Nonlocal(asdl_identifier_seq * names, int lineno, int diff --git a/Include/internal/pycore_ast_state.h b/Include/internal/pycore_ast_state.h index d4ac419f51d..1caf200ee34 100644 --- a/Include/internal/pycore_ast_state.h +++ b/Include/internal/pycore_ast_state.h @@ -205,6 +205,7 @@ struct ast_state { PyObject *id; PyObject *ifs; 
     PyObject *is_async;
+    PyObject *is_lazy;
     PyObject *items;
     PyObject *iter;
     PyObject *key;
diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h
index 102a378f8f0..801e56113cc 100644
--- a/Include/internal/pycore_ceval.h
+++ b/Include/internal/pycore_ceval.h
@@ -297,7 +297,11 @@ PyAPI_FUNC(void) _PyEval_FormatExcCheckArg(PyThreadState *tstate, PyObject *exc,
 PyAPI_FUNC(void) _PyEval_FormatExcUnbound(PyThreadState *tstate, PyCodeObject *co, int oparg);
 PyAPI_FUNC(void) _PyEval_FormatKwargsError(PyThreadState *tstate, PyObject *func, PyObject *kwargs);
 PyAPI_FUNC(PyObject *) _PyEval_ImportFrom(PyThreadState *, PyObject *, PyObject *);
-PyAPI_FUNC(PyObject *) _PyEval_ImportName(PyThreadState *, _PyInterpreterFrame *, PyObject *, PyObject *, PyObject *);
+PyAPI_FUNC(PyObject *) _PyEval_LazyImportName(PyThreadState *tstate, PyObject *builtins, PyObject *globals,
+                                              PyObject *name, PyObject *fromlist, PyObject *level);
+PyAPI_FUNC(PyObject *) _PyEval_LazyImportFrom(PyThreadState *tstate, PyObject *v, PyObject *name);
+PyAPI_FUNC(PyObject *) _PyEval_ImportName(PyThreadState *tstate, PyObject *builtins, PyObject *globals, PyObject *locals,
+                                          PyObject *name, PyObject *fromlist, PyObject *level);
 PyAPI_FUNC(PyObject *)_PyEval_MatchClass(PyThreadState *tstate, PyObject *subject, PyObject *type, Py_ssize_t nargs, PyObject *kwargs);
 PyAPI_FUNC(PyObject *)_PyEval_MatchKeys(PyThreadState *tstate, PyObject *map, PyObject *keys);
 PyAPI_FUNC(void) _PyEval_MonitorRaise(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr);
diff --git a/Include/internal/pycore_import.h b/Include/internal/pycore_import.h
index 13fbff4eb65..4b8ff0e3473 100644
--- a/Include/internal/pycore_import.h
+++ b/Include/internal/pycore_import.h
@@ -31,6 +31,12 @@ extern int _PyImport_FixupBuiltin(
     PyObject *modules
     );
 
+extern PyObject *
+_PyImport_ResolveName(PyThreadState *tstate, PyObject *name, PyObject *globals, int level);
+extern PyObject *
+_PyImport_LoadLazyImportTstate(PyThreadState *tstate, PyObject *lazy_import);
+
+
 #ifdef HAVE_DLOPEN
 #  include <dlfcn.h>               // RTLD_NOW, RTLD_LAZY
 #  if HAVE_DECL_RTLD_NOW
diff --git a/Include/internal/pycore_lazyimportobject.h b/Include/internal/pycore_lazyimportobject.h
new file mode 100644
index 00000000000..5a2138d3ac7
--- /dev/null
+++ b/Include/internal/pycore_lazyimportobject.h
@@ -0,0 +1,29 @@
+/* Copyright (c) Meta, Inc. and its affiliates.
All Rights Reserved */ +/* File added for Lazy Imports */ + +/* Lazy object interface */ + +#ifndef Py_LAZYIMPORTOBJECT_H +#define Py_LAZYIMPORTOBJECT_H +#ifdef __cplusplus +extern "C" { +#endif + +PyAPI_DATA(PyTypeObject) PyLazyImport_Type; +#define PyLazyImport_CheckExact(op) Py_IS_TYPE((op), &PyLazyImport_Type) + +typedef struct { + PyObject_HEAD + PyObject *lz_builtins; + PyObject *lz_from; + PyObject *lz_attr; +} PyLazyImportObject; + + +PyAPI_FUNC(PyObject *) _PyLazyImport_GetName(PyObject *lazy_import); +PyAPI_FUNC(PyObject *) _PyLazyImport_New(PyObject *builtins, PyObject *from, PyObject *attr); + +#ifdef __cplusplus +} +#endif +#endif /* !Py_LAZYIMPORTOBJECT_H */ diff --git a/Include/internal/pycore_magic_number.h b/Include/internal/pycore_magic_number.h index 7ec7bd1c695..ff8b26a055a 100644 --- a/Include/internal/pycore_magic_number.h +++ b/Include/internal/pycore_magic_number.h @@ -286,6 +286,7 @@ Known values: Python 3.15a1 3653 (Fix handling of opcodes that may leave operands on the stack when optimizing LOAD_FAST) Python 3.15a1 3654 (Fix missing exception handlers in logical expression) Python 3.15a1 3655 (Fix miscompilation of some module-level annotations) + Python 3.15a1 3656 Lazy imports IMPORT_NAME opcode changes Python 3.16 will start with 3700 @@ -299,7 +300,7 @@ PC/launcher.c must also be updated. */ -#define PYC_MAGIC_NUMBER 3655 +#define PYC_MAGIC_NUMBER 3656 /* This is equivalent to converting PYC_MAGIC_NUMBER to 2 bytes (little-endian) and then appending b'\r\n'. */ #define PYC_MAGIC_NUMBER_TOKEN \ diff --git a/Lib/dis.py b/Lib/dis.py index d6d2c1386dd..f250faf4d9a 100644 --- a/Lib/dis.py +++ b/Lib/dis.py @@ -35,6 +35,7 @@ FUNCTION_ATTR_FLAGS = ('defaults', 'kwdefaults', 'annotations', 'closure', 'annotate') ENTER_EXECUTOR = opmap['ENTER_EXECUTOR'] +IMPORT_NAME = opmap['IMPORT_NAME'] LOAD_GLOBAL = opmap['LOAD_GLOBAL'] LOAD_SMALL_INT = opmap['LOAD_SMALL_INT'] BINARY_OP = opmap['BINARY_OP'] @@ -601,6 +602,12 @@ def get_argval_argrepr(self, op, arg, offset): argval, argrepr = _get_name_info(arg//4, get_name) if (arg & 1) and argrepr: argrepr = f"{argrepr} + NULL|self" + elif deop == IMPORT_NAME: + argval, argrepr = _get_name_info(arg//4, get_name) + if (arg & 1) and argrepr: + argrepr = f"{argrepr} + lazy" + elif (arg & 2) and argrepr: + argrepr = f"{argrepr} + eager" else: argval, argrepr = _get_name_info(arg, get_name) elif deop in hasjump or deop in hasexc: diff --git a/Lib/keyword.py b/Lib/keyword.py index e22c837835e..98ffe2de28b 100644 --- a/Lib/keyword.py +++ b/Lib/keyword.py @@ -56,6 +56,7 @@ kwlist = [ softkwlist = [ '_', 'case', + 'lazy', 'match', 'type' ] diff --git a/Lib/test/test_ast/data/ast_repr.txt b/Lib/test/test_ast/data/ast_repr.txt index 1c1985519cd..f1b4c7b913f 100644 --- a/Lib/test/test_ast/data/ast_repr.txt +++ b/Lib/test/test_ast/data/ast_repr.txt @@ -69,10 +69,10 @@ Module(body=[Try(body=[Pass()], handlers=[ExceptHandler(type=Name(...), name='ex Module(body=[TryStar(body=[Pass()], handlers=[ExceptHandler(type=Name(...), name='exc', body=[Pass(...)])], orelse=[Pass()], finalbody=[Pass()])], type_ignores=[]) Module(body=[Assert(test=Name(id='v', ctx=Load(...)), msg=None)], type_ignores=[]) Module(body=[Assert(test=Name(id='v', ctx=Load(...)), msg=Constant(value='message', kind=None))], type_ignores=[]) -Module(body=[Import(names=[alias(name='sys', asname=None)])], type_ignores=[]) -Module(body=[Import(names=[alias(name='foo', asname='bar')])], type_ignores=[]) -Module(body=[ImportFrom(module='sys', names=[alias(name='x', asname='y')], 
level=0)], type_ignores=[]) -Module(body=[ImportFrom(module='sys', names=[alias(name='v', asname=None)], level=0)], type_ignores=[]) +Module(body=[Import(names=[alias(name='sys', asname=None)], is_lazy=0)], type_ignores=[]) +Module(body=[Import(names=[alias(name='foo', asname='bar')], is_lazy=0)], type_ignores=[]) +Module(body=[ImportFrom(module='sys', names=[alias(name='x', asname='y')], level=0, is_lazy=0)], type_ignores=[]) +Module(body=[ImportFrom(module='sys', names=[alias(name='v', asname=None)], level=0, is_lazy=0)], type_ignores=[]) Module(body=[Global(names=['v'])], type_ignores=[]) Module(body=[Expr(value=Constant(value=1, kind=None))], type_ignores=[]) Module(body=[Pass()], type_ignores=[]) diff --git a/Lib/test/test_ast/test_ast.py b/Lib/test/test_ast/test_ast.py index 1e6f6007430..e5bc6d381fc 100644 --- a/Lib/test/test_ast/test_ast.py +++ b/Lib/test/test_ast/test_ast.py @@ -1722,8 +1722,8 @@ def check_text(code, empty, full, **kwargs): check_text( "import _ast as ast; from module import sub", - empty="Module(body=[Import(names=[alias(name='_ast', asname='ast')]), ImportFrom(module='module', names=[alias(name='sub')], level=0)])", - full="Module(body=[Import(names=[alias(name='_ast', asname='ast')]), ImportFrom(module='module', names=[alias(name='sub')], level=0)], type_ignores=[])", + empty="Module(body=[Import(names=[alias(name='_ast', asname='ast')], is_lazy=0), ImportFrom(module='module', names=[alias(name='sub')], level=0, is_lazy=0)])", + full="Module(body=[Import(names=[alias(name='_ast', asname='ast')], is_lazy=0), ImportFrom(module='module', names=[alias(name='sub')], level=0, is_lazy=0)], type_ignores=[])", ) def test_copy_location(self): diff --git a/Makefile.pre.in b/Makefile.pre.in index 6651b093e20..06be156537c 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -539,6 +539,7 @@ OBJECT_OBJS= \ Objects/funcobject.o \ Objects/interpolationobject.o \ Objects/iterobject.o \ + Objects/lazyimportobject.o \ Objects/listobject.o \ Objects/longobject.o \ Objects/dictobject.o \ @@ -1368,6 +1369,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_interpolation.h \ $(srcdir)/Include/internal/pycore_intrinsics.h \ $(srcdir)/Include/internal/pycore_jit.h \ + $(srcdir)/Include/internal/pycore_lazyimportobject.h \ $(srcdir)/Include/internal/pycore_list.h \ $(srcdir)/Include/internal/pycore_llist.h \ $(srcdir)/Include/internal/pycore_lock.h \ diff --git a/Objects/lazyimportobject.c b/Objects/lazyimportobject.c new file mode 100644 index 00000000000..973056d5ced --- /dev/null +++ b/Objects/lazyimportobject.c @@ -0,0 +1,137 @@ +/* Copyright (c) Meta, Inc. and its affiliates. 
+   All Rights Reserved */
+/* File added for Lazy Imports */
+
+/* Lazy object implementation */
+
+#include "Python.h"
+#include "pycore_lazyimportobject.h"
+
+PyObject *
+_PyLazyImport_New(PyObject *builtins, PyObject *from, PyObject *attr)
+{
+    PyLazyImportObject *m;
+    if (!from || !PyUnicode_Check(from)) {
+        PyErr_BadArgument();
+        return NULL;
+    }
+    if (attr == Py_None) {
+        attr = NULL;
+    }
+    assert(!attr || PyObject_IsTrue(attr));
+    m = PyObject_GC_New(PyLazyImportObject, &PyLazyImport_Type);
+    if (m == NULL) {
+        return NULL;
+    }
+    Py_XINCREF(builtins);
+    m->lz_builtins = builtins;
+    Py_INCREF(from);
+    m->lz_from = from;
+    Py_XINCREF(attr);
+    m->lz_attr = attr;
+    PyObject_GC_Track(m);
+    return (PyObject *)m;
+}
+
+static void
+lazy_import_dealloc(PyLazyImportObject *m)
+{
+    PyObject_GC_UnTrack(m);
+    Py_XDECREF(m->lz_builtins);
+    Py_XDECREF(m->lz_from);
+    Py_XDECREF(m->lz_attr);
+    Py_TYPE(m)->tp_free((PyObject *)m);
+}
+
+static PyObject *
+lazy_import_name(PyLazyImportObject *m)
+{
+    if (m->lz_attr != NULL) {
+        if (PyUnicode_Check(m->lz_attr)) {
+            return PyUnicode_FromFormat("%U.%U", m->lz_from, m->lz_attr);
+        } else {
+            return PyUnicode_FromFormat("%U...", m->lz_from);
+        }
+    }
+    Py_INCREF(m->lz_from);
+    return m->lz_from;
+}
+
+static PyObject *
+lazy_import_repr(PyLazyImportObject *m)
+{
+    PyObject *name = lazy_import_name(m);
+    if (name == NULL) {
+        return NULL;
+    }
+    PyObject *res = PyUnicode_FromFormat("<lazy_import %U>", name);
+    Py_DECREF(name);
+    return res;
+}
+
+static int
+lazy_import_traverse(PyLazyImportObject *m, visitproc visit, void *arg)
+{
+    Py_VISIT(m->lz_builtins);
+    Py_VISIT(m->lz_from);
+    Py_VISIT(m->lz_attr);
+    return 0;
+}
+
+static int
+lazy_import_clear(PyLazyImportObject *m)
+{
+    Py_CLEAR(m->lz_builtins);
+    Py_CLEAR(m->lz_from);
+    Py_CLEAR(m->lz_attr);
+    return 0;
+}
+
+PyObject *
+_PyLazyImport_GetName(PyObject *lazy_import)
+{
+    assert(PyLazyImport_CheckExact(lazy_import));
+    return lazy_import_name((PyLazyImportObject *)lazy_import);
+}
+
+PyTypeObject PyLazyImport_Type = {
+    PyVarObject_HEAD_INIT(&PyType_Type, 0)
+    "lazy_import",                              /* tp_name */
+    sizeof(PyLazyImportObject),                 /* tp_basicsize */
+    0,                                          /* tp_itemsize */
+    (destructor)lazy_import_dealloc,            /* tp_dealloc */
+    0,                                          /* tp_vectorcall_offset */
+    0,                                          /* tp_getattr */
+    0,                                          /* tp_setattr */
+    0,                                          /* tp_as_async */
+    (reprfunc)lazy_import_repr,                 /* tp_repr */
+    0,                                          /* tp_as_number */
+    0,                                          /* tp_as_sequence */
+    0,                                          /* tp_as_mapping */
+    0,                                          /* tp_hash */
+    0,                                          /* tp_call */
+    0,                                          /* tp_str */
+    0,                                          /* tp_getattro */
+    0,                                          /* tp_setattro */
+    0,                                          /* tp_as_buffer */
+    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
+        Py_TPFLAGS_BASETYPE,                    /* tp_flags */
+    0,                                          /* tp_doc */
+    (traverseproc)lazy_import_traverse,         /* tp_traverse */
+    (inquiry)lazy_import_clear,                 /* tp_clear */
+    0,                                          /* tp_richcompare */
+    0,                                          /* tp_weaklistoffset */
+    0,                                          /* tp_iter */
+    0,                                          /* tp_iternext */
+    0,                                          /* tp_methods */
+    0,                                          /* tp_members */
+    0,                                          /* tp_getset */
+    0,                                          /* tp_base */
+    0,                                          /* tp_dict */
+    0,                                          /* tp_descr_get */
+    0,                                          /* tp_descr_set */
+    0,                                          /* tp_dictoffset */
+    0,                                          /* tp_init */
+    PyType_GenericAlloc,                        /* tp_alloc */
+    PyType_GenericNew,                          /* tp_new */
+    PyObject_GC_Del,                            /* tp_free */
+};
diff --git a/Parser/Python.asdl b/Parser/Python.asdl
index 96f3914b029..6579642540e 100644
--- a/Parser/Python.asdl
+++ b/Parser/Python.asdl
@@ -45,8 +45,8 @@ module Python
           | TryStar(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)
           | Assert(expr test, expr? msg)
-          | Import(alias* names)
-          | ImportFrom(identifier? module, alias* names, int? level)
+          | Import(alias* names, int?
is_lazy) + | ImportFrom(identifier? module, alias* names, int? level, int? is_lazy) | Global(identifier* names) | Nonlocal(identifier* names) diff --git a/Parser/action_helpers.c b/Parser/action_helpers.c index 57e46b4399c..cfd01e3a780 100644 --- a/Parser/action_helpers.c +++ b/Parser/action_helpers.c @@ -1916,7 +1916,7 @@ _PyPegen_concatenate_strings(Parser *p, asdl_expr_seq *strings, stmt_ty _PyPegen_checked_future_import(Parser *p, identifier module, asdl_alias_seq * names, int level, - int lineno, int col_offset, int end_lineno, int end_col_offset, + int is_lazy, int lineno, int col_offset, int end_lineno, int end_col_offset, PyArena *arena) { if (level == 0 && PyUnicode_CompareWithASCIIString(module, "__future__") == 0) { for (Py_ssize_t i = 0; i < asdl_seq_LEN(names); i++) { @@ -1926,7 +1926,7 @@ _PyPegen_checked_future_import(Parser *p, identifier module, asdl_alias_seq * na } } } - return _PyAST_ImportFrom(module, names, level, lineno, col_offset, end_lineno, end_col_offset, arena); + return _PyAST_ImportFrom(module, names, level, is_lazy, lineno, col_offset, end_lineno, end_col_offset, arena); } asdl_stmt_seq* diff --git a/Parser/parser.c b/Parser/parser.c index 8242c4dfabb..daea54b0d25 100644 --- a/Parser/parser.c +++ b/Parser/parser.c @@ -21,54 +21,54 @@ static KeywordToken *reserved_keywords[] = { (KeywordToken[]) {{NULL, -1}}, (KeywordToken[]) {{NULL, -1}}, (KeywordToken[]) { - {"if", 691}, - {"as", 689}, - {"in", 704}, - {"or", 589}, - {"is", 597}, + {"if", 692}, + {"as", 690}, + {"in", 705}, + {"or", 590}, + {"is", 598}, {NULL, -1}, }, (KeywordToken[]) { - {"del", 630}, - {"def", 708}, - {"for", 703}, - {"try", 665}, - {"and", 590}, - {"not", 712}, + {"del", 631}, + {"def", 709}, + {"for", 704}, + {"try", 666}, + {"and", 591}, + {"not", 713}, {NULL, -1}, }, (KeywordToken[]) { - {"from", 642}, + {"from", 643}, {"pass", 527}, - {"with", 656}, - {"elif", 696}, - {"else", 695}, - {"None", 624}, - {"True", 623}, + {"with", 657}, + {"elif", 697}, + {"else", 696}, + {"None", 625}, + {"True", 624}, {NULL, -1}, }, (KeywordToken[]) { - {"raise", 628}, - {"yield", 588}, + {"raise", 629}, + {"yield", 589}, {"break", 528}, - {"async", 707}, - {"class", 710}, - {"while", 698}, - {"False", 625}, - {"await", 598}, + {"async", 708}, + {"class", 711}, + {"while", 699}, + {"False", 626}, + {"await", 599}, {NULL, -1}, }, (KeywordToken[]) { + {"import", 644}, {"return", 522}, - {"import", 643}, - {"assert", 634}, + {"assert", 635}, {"global", 530}, - {"except", 686}, - {"lambda", 622}, + {"except", 687}, + {"lambda", 623}, {NULL, -1}, }, (KeywordToken[]) { - {"finally", 682}, + {"finally", 683}, {NULL, -1}, }, (KeywordToken[]) { @@ -80,6 +80,7 @@ static KeywordToken *reserved_keywords[] = { static char *soft_keywords[] = { "_", "case", + "lazy", "match", "type", NULL, @@ -1544,9 +1545,9 @@ simple_stmts_rule(Parser *p) // simple_stmt: // | assignment // | &"type" type_alias +// | &('import' | 'from' | "lazy") import_stmt // | star_expressions // | &'return' return_stmt -// | &('import' | 'from') import_stmt // | &'raise' raise_stmt // | &'pass' pass_stmt // | &'del' del_stmt @@ -1621,6 +1622,27 @@ simple_stmt_rule(Parser *p) D(fprintf(stderr, "%*c%s simple_stmt[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "&\"type\" type_alias")); } + { // &('import' | 'from' | "lazy") import_stmt + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('import' | 'from' | \"lazy\") import_stmt")); + stmt_ty import_stmt_var; + if ( + _PyPegen_lookahead(1, _tmp_5_rule, p) + && + (import_stmt_var = import_stmt_rule(p)) // import_stmt + ) + { + D(fprintf(stderr, "%*c+ simple_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('import' | 'from' | \"lazy\") import_stmt")); + _res = import_stmt_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s simple_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('import' | 'from' | \"lazy\") import_stmt")); + } { // star_expressions if (p->error_indicator) { p->level--; @@ -1675,27 +1697,6 @@ simple_stmt_rule(Parser *p) D(fprintf(stderr, "%*c%s simple_stmt[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'return' return_stmt")); } - { // &('import' | 'from') import_stmt - if (p->error_indicator) { - p->level--; - return NULL; - } - D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('import' | 'from') import_stmt")); - stmt_ty import_stmt_var; - if ( - _PyPegen_lookahead(1, _tmp_5_rule, p) - && - (import_stmt_var = import_stmt_rule(p)) // import_stmt - ) - { - D(fprintf(stderr, "%*c+ simple_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('import' | 'from') import_stmt")); - _res = import_stmt_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s simple_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('import' | 'from') import_stmt")); - } { // &'raise' raise_stmt if (p->error_indicator) { p->level--; @@ -1704,7 +1705,7 @@ simple_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'raise' raise_stmt")); stmt_ty raise_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 628) // token='raise' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 629) // token='raise' && (raise_stmt_var = raise_stmt_rule(p)) // raise_stmt ) @@ -1746,7 +1747,7 @@ simple_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'del' del_stmt")); stmt_ty del_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 630) // token='del' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 631) // token='del' && (del_stmt_var = del_stmt_rule(p)) // del_stmt ) @@ -1767,7 +1768,7 @@ simple_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'yield' yield_stmt")); stmt_ty yield_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 588) // token='yield' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 589) // token='yield' && (yield_stmt_var = yield_stmt_rule(p)) // yield_stmt ) @@ -1788,7 +1789,7 @@ simple_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'assert' assert_stmt")); stmt_ty assert_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 634) // token='assert' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 635) // token='assert' && (assert_stmt_var = assert_stmt_rule(p)) // assert_stmt ) @@ -1942,7 +1943,7 @@ 
compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt")); stmt_ty if_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 691) // token='if' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 692) // token='if' && (if_stmt_var = if_stmt_rule(p)) // if_stmt ) @@ -2026,7 +2027,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt")); stmt_ty try_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 665) // token='try' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 666) // token='try' && (try_stmt_var = try_stmt_rule(p)) // try_stmt ) @@ -2047,7 +2048,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt")); stmt_ty while_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 698) // token='while' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 699) // token='while' && (while_stmt_var = while_stmt_rule(p)) // while_stmt ) @@ -2810,11 +2811,11 @@ raise_stmt_rule(Parser *p) expr_ty a; expr_ty b; if ( - (_keyword = _PyPegen_expect_token(p, 628)) // token='raise' + (_keyword = _PyPegen_expect_token(p, 629)) // token='raise' && (a = expression_rule(p)) // expression && - (_keyword_1 = _PyPegen_expect_token(p, 642)) // token='from' + (_keyword_1 = _PyPegen_expect_token(p, 643)) // token='from' && (b = expression_rule(p)) // expression ) @@ -2869,7 +2870,7 @@ raise_stmt_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 628)) // token='raise' + (_keyword = _PyPegen_expect_token(p, 629)) // token='raise' && (a = expression_rule(p)) // expression ) @@ -2904,7 +2905,7 @@ raise_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> raise_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'raise'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 628)) // token='raise' + (_keyword = _PyPegen_expect_token(p, 629)) // token='raise' ) { D(fprintf(stderr, "%*c+ raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise'")); @@ -3277,7 +3278,7 @@ del_stmt_rule(Parser *p) Token * _keyword; asdl_expr_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 630)) // token='del' + (_keyword = _PyPegen_expect_token(p, 631)) // token='del' && (a = del_targets_rule(p)) // del_targets && @@ -3443,7 +3444,7 @@ assert_stmt_rule(Parser *p) expr_ty a; void *b; if ( - (_keyword = _PyPegen_expect_token(p, 634)) // token='assert' + (_keyword = _PyPegen_expect_token(p, 635)) // token='assert' && (a = expression_rule(p)) // expression && @@ -3490,6 +3491,10 @@ import_stmt_rule(Parser *p) return NULL; } stmt_ty _res = NULL; + if (_PyPegen_is_memoized(p, import_stmt_type, &_res)) { + p->level--; + return _res; + } int _mark = p->mark; if (p->call_invalid_rules) { // invalid_import if (p->error_indicator) { @@ -3550,11 +3555,12 @@ import_stmt_rule(Parser *p) } _res = NULL; done: + _PyPegen_insert_memo(p, _mark, import_stmt_type, _res); p->level--; return _res; } -// import_name: 'import' dotted_as_names +// import_name: 'import' dotted_as_names | "lazy" 'import' dotted_as_names static stmt_ty import_name_rule(Parser *p) { @@ -3585,7 +3591,7 @@ import_name_rule(Parser *p) Token * _keyword; asdl_alias_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 643)) // token='import' + (_keyword = _PyPegen_expect_token(p, 644)) // token='import' && (a = 
dotted_as_names_rule(p)) // dotted_as_names ) @@ -3600,7 +3606,7 @@ import_name_rule(Parser *p) UNUSED(_end_lineno); // Only used by EXTRA macro int _end_col_offset = _token->end_col_offset; UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _PyAST_Import ( a , EXTRA ); + _res = _PyAST_Import ( a , 0 , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; p->level--; @@ -3612,6 +3618,45 @@ import_name_rule(Parser *p) D(fprintf(stderr, "%*c%s import_name[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'import' dotted_as_names")); } + { // "lazy" 'import' dotted_as_names + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> import_name[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "\"lazy\" 'import' dotted_as_names")); + expr_ty _keyword; + Token * _keyword_1; + asdl_alias_seq* a; + if ( + (_keyword = _PyPegen_expect_soft_keyword(p, "lazy")) // soft_keyword='"lazy"' + && + (_keyword_1 = _PyPegen_expect_token(p, 644)) // token='import' + && + (a = dotted_as_names_rule(p)) // dotted_as_names + ) + { + D(fprintf(stderr, "%*c+ import_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "\"lazy\" 'import' dotted_as_names")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + p->level--; + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _PyAST_Import ( a , 1 , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s import_name[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "\"lazy\" 'import' dotted_as_names")); + } _res = NULL; done: p->level--; @@ -3619,8 +3664,8 @@ import_name_rule(Parser *p) } // import_from: -// | 'from' (('.' | '...'))* dotted_name 'import' import_from_targets -// | 'from' (('.' | '...'))+ 'import' import_from_targets +// | "lazy"? 'from' (('.' | '...'))* dotted_name 'import' import_from_targets +// | "lazy"? 'from' (('.' | '...'))+ 'import' import_from_targets static stmt_ty import_from_rule(Parser *p) { @@ -3642,30 +3687,33 @@ import_from_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'from' (('.' | '...'))* dotted_name 'import' import_from_targets + { // "lazy"? 'from' (('.' | '...'))* dotted_name 'import' import_from_targets if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> import_from[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from' (('.' | '...'))* dotted_name 'import' import_from_targets")); + D(fprintf(stderr, "%*c> import_from[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "\"lazy\"? 'from' (('.' | '...'))* dotted_name 'import' import_from_targets")); Token * _keyword; Token * _keyword_1; asdl_seq * a; expr_ty b; asdl_alias_seq* c; + void *lazy; if ( - (_keyword = _PyPegen_expect_token(p, 642)) // token='from' + (lazy = _PyPegen_expect_soft_keyword(p, "lazy"), !p->error_indicator) // "lazy"? + && + (_keyword = _PyPegen_expect_token(p, 643)) // token='from' && (a = _loop0_17_rule(p)) // (('.' 
| '...'))* && (b = dotted_name_rule(p)) // dotted_name && - (_keyword_1 = _PyPegen_expect_token(p, 643)) // token='import' + (_keyword_1 = _PyPegen_expect_token(p, 644)) // token='import' && (c = import_from_targets_rule(p)) // import_from_targets ) { - D(fprintf(stderr, "%*c+ import_from[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from' (('.' | '...'))* dotted_name 'import' import_from_targets")); + D(fprintf(stderr, "%*c+ import_from[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "\"lazy\"? 'from' (('.' | '...'))* dotted_name 'import' import_from_targets")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { p->level--; @@ -3675,7 +3723,7 @@ import_from_rule(Parser *p) UNUSED(_end_lineno); // Only used by EXTRA macro int _end_col_offset = _token->end_col_offset; UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _PyPegen_checked_future_import ( p , b -> v . Name . id , c , _PyPegen_seq_count_dots ( a ) , EXTRA ); + _res = _PyPegen_checked_future_import ( p , b -> v . Name . id , c , _PyPegen_seq_count_dots ( a ) , lazy ? 1 : 0 , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; p->level--; @@ -3685,29 +3733,32 @@ import_from_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s import_from[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'from' (('.' | '...'))* dotted_name 'import' import_from_targets")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "\"lazy\"? 'from' (('.' | '...'))* dotted_name 'import' import_from_targets")); } - { // 'from' (('.' | '...'))+ 'import' import_from_targets + { // "lazy"? 'from' (('.' | '...'))+ 'import' import_from_targets if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> import_from[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from' (('.' | '...'))+ 'import' import_from_targets")); + D(fprintf(stderr, "%*c> import_from[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "\"lazy\"? 'from' (('.' | '...'))+ 'import' import_from_targets")); Token * _keyword; Token * _keyword_1; asdl_seq * a; asdl_alias_seq* b; + void *lazy; if ( - (_keyword = _PyPegen_expect_token(p, 642)) // token='from' + (lazy = _PyPegen_expect_soft_keyword(p, "lazy"), !p->error_indicator) // "lazy"? + && + (_keyword = _PyPegen_expect_token(p, 643)) // token='from' && (a = _loop1_18_rule(p)) // (('.' | '...'))+ && - (_keyword_1 = _PyPegen_expect_token(p, 643)) // token='import' + (_keyword_1 = _PyPegen_expect_token(p, 644)) // token='import' && (b = import_from_targets_rule(p)) // import_from_targets ) { - D(fprintf(stderr, "%*c+ import_from[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from' (('.' | '...'))+ 'import' import_from_targets")); + D(fprintf(stderr, "%*c+ import_from[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "\"lazy\"? 'from' (('.' | '...'))+ 'import' import_from_targets")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { p->level--; @@ -3717,7 +3768,7 @@ import_from_rule(Parser *p) UNUSED(_end_lineno); // Only used by EXTRA macro int _end_col_offset = _token->end_col_offset; UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _PyAST_ImportFrom ( NULL , b , _PyPegen_seq_count_dots ( a ) , EXTRA ); + _res = _PyAST_ImportFrom ( NULL , b , _PyPegen_seq_count_dots ( a ) , lazy ? 
1 : 0 , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; p->level--; @@ -3727,7 +3778,7 @@ import_from_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s import_from[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'from' (('.' | '...'))+ 'import' import_from_targets")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "\"lazy\"? 'from' (('.' | '...'))+ 'import' import_from_targets")); } _res = NULL; done: @@ -4489,7 +4540,7 @@ class_def_raw_rule(Parser *p) asdl_stmt_seq* c; void *t; if ( - (_keyword = _PyPegen_expect_token(p, 710)) // token='class' + (_keyword = _PyPegen_expect_token(p, 711)) // token='class' && (a = _PyPegen_name_token(p)) // NAME && @@ -4656,7 +4707,7 @@ function_def_raw_rule(Parser *p) void *t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 708)) // token='def' + (_keyword = _PyPegen_expect_token(p, 709)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && @@ -4717,9 +4768,9 @@ function_def_raw_rule(Parser *p) void *t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 707)) // token='async' + (_keyword = _PyPegen_expect_token(p, 708)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 708)) // token='def' + (_keyword_1 = _PyPegen_expect_token(p, 709)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && @@ -6057,7 +6108,7 @@ if_stmt_rule(Parser *p) asdl_stmt_seq* b; stmt_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 691)) // token='if' + (_keyword = _PyPegen_expect_token(p, 692)) // token='if' && (a = named_expression_rule(p)) // named_expression && @@ -6102,7 +6153,7 @@ if_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 691)) // token='if' + (_keyword = _PyPegen_expect_token(p, 692)) // token='if' && (a = named_expression_rule(p)) // named_expression && @@ -6197,7 +6248,7 @@ elif_stmt_rule(Parser *p) asdl_stmt_seq* b; stmt_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 696)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 697)) // token='elif' && (a = named_expression_rule(p)) // named_expression && @@ -6242,7 +6293,7 @@ elif_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 696)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 697)) // token='elif' && (a = named_expression_rule(p)) // named_expression && @@ -6323,7 +6374,7 @@ else_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 695)) // token='else' + (_keyword = _PyPegen_expect_token(p, 696)) // token='else' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -6402,7 +6453,7 @@ while_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 698)) // token='while' + (_keyword = _PyPegen_expect_token(p, 699)) // token='while' && (a = named_expression_rule(p)) // named_expression && @@ -6502,11 +6553,11 @@ for_stmt_rule(Parser *p) expr_ty t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 703)) // token='for' + (_keyword = _PyPegen_expect_token(p, 704)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 704)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 705)) // token='in' && (_cut_var = 1) && @@ -6564,13 +6615,13 @@ for_stmt_rule(Parser *p) expr_ty t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 707)) // token='async' + (_keyword = _PyPegen_expect_token(p, 708)) // token='async' && - (_keyword_1 = 
_PyPegen_expect_token(p, 703)) // token='for' + (_keyword_1 = _PyPegen_expect_token(p, 704)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (_keyword_2 = _PyPegen_expect_token(p, 704)) // token='in' + (_keyword_2 = _PyPegen_expect_token(p, 705)) // token='in' && (_cut_var = 1) && @@ -6699,7 +6750,7 @@ with_stmt_rule(Parser *p) asdl_stmt_seq* b; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='with' + (_keyword = _PyPegen_expect_token(p, 657)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -6750,7 +6801,7 @@ with_stmt_rule(Parser *p) asdl_stmt_seq* b; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='with' + (_keyword = _PyPegen_expect_token(p, 657)) // token='with' && (a = (asdl_withitem_seq*)_gather_34_rule(p)) // ','.with_item+ && @@ -6799,9 +6850,9 @@ with_stmt_rule(Parser *p) asdl_withitem_seq* a; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 707)) // token='async' + (_keyword = _PyPegen_expect_token(p, 708)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 656)) // token='with' + (_keyword_1 = _PyPegen_expect_token(p, 657)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -6851,9 +6902,9 @@ with_stmt_rule(Parser *p) asdl_stmt_seq* b; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 707)) // token='async' + (_keyword = _PyPegen_expect_token(p, 708)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 656)) // token='with' + (_keyword_1 = _PyPegen_expect_token(p, 657)) // token='with' && (a = (asdl_withitem_seq*)_gather_34_rule(p)) // ','.with_item+ && @@ -6939,7 +6990,7 @@ with_item_rule(Parser *p) if ( (e = expression_rule(p)) // expression && - (_keyword = _PyPegen_expect_token(p, 689)) // token='as' + (_keyword = _PyPegen_expect_token(p, 690)) // token='as' && (t = star_target_rule(p)) // star_target && @@ -7064,7 +7115,7 @@ try_stmt_rule(Parser *p) asdl_stmt_seq* b; asdl_stmt_seq* f; if ( - (_keyword = _PyPegen_expect_token(p, 665)) // token='try' + (_keyword = _PyPegen_expect_token(p, 666)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7108,7 +7159,7 @@ try_stmt_rule(Parser *p) asdl_excepthandler_seq* ex; void *f; if ( - (_keyword = _PyPegen_expect_token(p, 665)) // token='try' + (_keyword = _PyPegen_expect_token(p, 666)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7156,7 +7207,7 @@ try_stmt_rule(Parser *p) asdl_excepthandler_seq* ex; void *f; if ( - (_keyword = _PyPegen_expect_token(p, 665)) // token='try' + (_keyword = _PyPegen_expect_token(p, 666)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7255,7 +7306,7 @@ except_block_rule(Parser *p) asdl_stmt_seq* b; expr_ty e; if ( - (_keyword = _PyPegen_expect_token(p, 686)) // token='except' + (_keyword = _PyPegen_expect_token(p, 687)) // token='except' && (e = expression_rule(p)) // expression && @@ -7299,11 +7350,11 @@ except_block_rule(Parser *p) expr_ty e; expr_ty t; if ( - (_keyword = _PyPegen_expect_token(p, 686)) // token='except' + (_keyword = _PyPegen_expect_token(p, 687)) // token='except' && (e = expression_rule(p)) // expression && - (_keyword_1 = _PyPegen_expect_token(p, 689)) // token='as' + (_keyword_1 = _PyPegen_expect_token(p, 690)) // token='as' && (t = _PyPegen_name_token(p)) // NAME && @@ -7345,7 +7396,7 @@ except_block_rule(Parser *p) asdl_stmt_seq* b; expr_ty e; if ( - (_keyword = 
_PyPegen_expect_token(p, 686)) // token='except' + (_keyword = _PyPegen_expect_token(p, 687)) // token='except' && (e = expressions_rule(p)) // expressions && @@ -7386,7 +7437,7 @@ except_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 686)) // token='except' + (_keyword = _PyPegen_expect_token(p, 687)) // token='except' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -7498,7 +7549,7 @@ except_star_block_rule(Parser *p) asdl_stmt_seq* b; expr_ty e; if ( - (_keyword = _PyPegen_expect_token(p, 686)) // token='except' + (_keyword = _PyPegen_expect_token(p, 687)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -7545,13 +7596,13 @@ except_star_block_rule(Parser *p) expr_ty e; expr_ty t; if ( - (_keyword = _PyPegen_expect_token(p, 686)) // token='except' + (_keyword = _PyPegen_expect_token(p, 687)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (e = expression_rule(p)) // expression && - (_keyword_1 = _PyPegen_expect_token(p, 689)) // token='as' + (_keyword_1 = _PyPegen_expect_token(p, 690)) // token='as' && (t = _PyPegen_name_token(p)) // NAME && @@ -7594,7 +7645,7 @@ except_star_block_rule(Parser *p) asdl_stmt_seq* b; expr_ty e; if ( - (_keyword = _PyPegen_expect_token(p, 686)) // token='except' + (_keyword = _PyPegen_expect_token(p, 687)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -7694,7 +7745,7 @@ finally_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 682)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 683)) // token='finally' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -8002,7 +8053,7 @@ guard_rule(Parser *p) Token * _keyword; expr_ty guard; if ( - (_keyword = _PyPegen_expect_token(p, 691)) // token='if' + (_keyword = _PyPegen_expect_token(p, 692)) // token='if' && (guard = named_expression_rule(p)) // named_expression ) @@ -8197,7 +8248,7 @@ as_pattern_rule(Parser *p) if ( (pattern = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 689)) // token='as' + (_keyword = _PyPegen_expect_token(p, 690)) // token='as' && (target = pattern_capture_target_rule(p)) // pattern_capture_target ) @@ -8631,7 +8682,7 @@ literal_pattern_rule(Parser *p) D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 624)) // token='None' + (_keyword = _PyPegen_expect_token(p, 625)) // token='None' ) { D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); @@ -8664,7 +8715,7 @@ literal_pattern_rule(Parser *p) D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 623)) // token='True' + (_keyword = _PyPegen_expect_token(p, 624)) // token='True' ) { D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); @@ -8697,7 +8748,7 @@ literal_pattern_rule(Parser *p) D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 625)) // token='False' + (_keyword = _PyPegen_expect_token(p, 626)) // token='False' ) { D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, 
"'False'")); @@ -8825,7 +8876,7 @@ literal_expr_rule(Parser *p) D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 624)) // token='None' + (_keyword = _PyPegen_expect_token(p, 625)) // token='None' ) { D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); @@ -8858,7 +8909,7 @@ literal_expr_rule(Parser *p) D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 623)) // token='True' + (_keyword = _PyPegen_expect_token(p, 624)) // token='True' ) { D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); @@ -8891,7 +8942,7 @@ literal_expr_rule(Parser *p) D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 625)) // token='False' + (_keyword = _PyPegen_expect_token(p, 626)) // token='False' ) { D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); @@ -11494,11 +11545,11 @@ expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 691)) // token='if' + (_keyword = _PyPegen_expect_token(p, 692)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 695)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 696)) // token='else' && (c = expression_rule(p)) // expression ) @@ -11602,9 +11653,9 @@ yield_expr_rule(Parser *p) Token * _keyword_1; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 588)) // token='yield' + (_keyword = _PyPegen_expect_token(p, 589)) // token='yield' && - (_keyword_1 = _PyPegen_expect_token(p, 642)) // token='from' + (_keyword_1 = _PyPegen_expect_token(p, 643)) // token='from' && (a = expression_rule(p)) // expression ) @@ -11640,7 +11691,7 @@ yield_expr_rule(Parser *p) Token * _keyword; void *a; if ( - (_keyword = _PyPegen_expect_token(p, 588)) // token='yield' + (_keyword = _PyPegen_expect_token(p, 589)) // token='yield' && (a = star_expressions_rule(p), !p->error_indicator) // star_expressions? 
) @@ -12380,7 +12431,7 @@ inversion_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 712)) // token='not' + (_keyword = _PyPegen_expect_token(p, 713)) // token='not' && (a = inversion_rule(p)) // inversion ) @@ -13034,9 +13085,9 @@ notin_bitwise_or_rule(Parser *p) Token * _keyword_1; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 712)) // token='not' + (_keyword = _PyPegen_expect_token(p, 713)) // token='not' && - (_keyword_1 = _PyPegen_expect_token(p, 704)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 705)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -13082,7 +13133,7 @@ in_bitwise_or_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 704)) // token='in' + (_keyword = _PyPegen_expect_token(p, 705)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -13129,9 +13180,9 @@ isnot_bitwise_or_rule(Parser *p) Token * _keyword_1; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 597)) // token='is' + (_keyword = _PyPegen_expect_token(p, 598)) // token='is' && - (_keyword_1 = _PyPegen_expect_token(p, 712)) // token='not' + (_keyword_1 = _PyPegen_expect_token(p, 713)) // token='not' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -13177,7 +13228,7 @@ is_bitwise_or_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 597)) // token='is' + (_keyword = _PyPegen_expect_token(p, 598)) // token='is' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -14493,7 +14544,7 @@ await_primary_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 598)) // token='await' + (_keyword = _PyPegen_expect_token(p, 599)) // token='await' && (a = primary_rule(p)) // primary ) @@ -15037,7 +15088,7 @@ atom_rule(Parser *p) D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 623)) // token='True' + (_keyword = _PyPegen_expect_token(p, 624)) // token='True' ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); @@ -15070,7 +15121,7 @@ atom_rule(Parser *p) D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 625)) // token='False' + (_keyword = _PyPegen_expect_token(p, 626)) // token='False' ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); @@ -15103,7 +15154,7 @@ atom_rule(Parser *p) D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 624)) // token='None' + (_keyword = _PyPegen_expect_token(p, 625)) // token='None' ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); @@ -15371,7 +15422,7 @@ lambdef_rule(Parser *p) void *a; expr_ty b; if ( - (_keyword = _PyPegen_expect_token(p, 622)) // token='lambda' + (_keyword = _PyPegen_expect_token(p, 623)) // token='lambda' && (a = lambda_params_rule(p), !p->error_indicator) // lambda_params? 
&& @@ -17790,13 +17841,13 @@ for_if_clause_rule(Parser *p) expr_ty b; asdl_expr_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 707)) // token='async' + (_keyword = _PyPegen_expect_token(p, 708)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 703)) // token='for' + (_keyword_1 = _PyPegen_expect_token(p, 704)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (_keyword_2 = _PyPegen_expect_token(p, 704)) // token='in' + (_keyword_2 = _PyPegen_expect_token(p, 705)) // token='in' && (_cut_var = 1) && @@ -17835,11 +17886,11 @@ for_if_clause_rule(Parser *p) expr_ty b; asdl_expr_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 703)) // token='for' + (_keyword = _PyPegen_expect_token(p, 704)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 704)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 705)) // token='in' && (_cut_var = 1) && @@ -21140,11 +21191,11 @@ expression_without_invalid_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 691)) // token='if' + (_keyword = _PyPegen_expect_token(p, 692)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 695)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 696)) // token='else' && (c = expression_rule(p)) // expression ) @@ -21444,7 +21495,7 @@ invalid_expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 691)) // token='if' + (_keyword = _PyPegen_expect_token(p, 692)) // token='if' && (b = disjunction_rule(p)) // disjunction && @@ -21477,11 +21528,11 @@ invalid_expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 691)) // token='if' + (_keyword = _PyPegen_expect_token(p, 692)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 695)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 696)) // token='else' && _PyPegen_lookahead_for_expr(0, expression_rule, p) ) @@ -21513,11 +21564,11 @@ invalid_expression_rule(Parser *p) if ( (a = (stmt_ty)_tmp_116_rule(p)) // pass_stmt | break_stmt | continue_stmt && - (_keyword = _PyPegen_expect_token(p, 691)) // token='if' + (_keyword = _PyPegen_expect_token(p, 692)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 695)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 696)) // token='else' && (c = simple_stmt_rule(p)) // simple_stmt ) @@ -21546,7 +21597,7 @@ invalid_expression_rule(Parser *p) Token * a; Token * b; if ( - (a = _PyPegen_expect_token(p, 622)) // token='lambda' + (a = _PyPegen_expect_token(p, 623)) // token='lambda' && (_opt_var = lambda_params_rule(p), !p->error_indicator) // lambda_params? && @@ -21579,7 +21630,7 @@ invalid_expression_rule(Parser *p) Token * a; Token * b; if ( - (a = _PyPegen_expect_token(p, 622)) // token='lambda' + (a = _PyPegen_expect_token(p, 623)) // token='lambda' && (_opt_var = lambda_params_rule(p), !p->error_indicator) // lambda_params? 
&& @@ -22050,9 +22101,9 @@ invalid_raise_stmt_rule(Parser *p) Token * a; Token * b; if ( - (a = _PyPegen_expect_token(p, 628)) // token='raise' + (a = _PyPegen_expect_token(p, 629)) // token='raise' && - (b = _PyPegen_expect_token(p, 642)) // token='from' + (b = _PyPegen_expect_token(p, 643)) // token='from' ) { D(fprintf(stderr, "%*c+ invalid_raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise' 'from'")); @@ -22078,11 +22129,11 @@ invalid_raise_stmt_rule(Parser *p) Token * a; expr_ty expression_var; if ( - (_keyword = _PyPegen_expect_token(p, 628)) // token='raise' + (_keyword = _PyPegen_expect_token(p, 629)) // token='raise' && (expression_var = expression_rule(p)) // expression && - (a = _PyPegen_expect_token(p, 642)) // token='from' + (a = _PyPegen_expect_token(p, 643)) // token='from' ) { D(fprintf(stderr, "%*c+ invalid_raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise' expression 'from'")); @@ -22126,7 +22177,7 @@ invalid_del_stmt_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 630)) // token='del' + (_keyword = _PyPegen_expect_token(p, 631)) // token='del' && (a = star_expressions_rule(p)) // star_expressions ) @@ -22178,7 +22229,7 @@ invalid_assert_stmt_rule(Parser *p) expr_ty a; expr_ty b; if ( - (_keyword = _PyPegen_expect_token(p, 634)) // token='assert' + (_keyword = _PyPegen_expect_token(p, 635)) // token='assert' && (a = expression_rule(p)) // expression && @@ -22213,7 +22264,7 @@ invalid_assert_stmt_rule(Parser *p) expr_ty b; expr_ty expression_var; if ( - (_keyword = _PyPegen_expect_token(p, 634)) // token='assert' + (_keyword = _PyPegen_expect_token(p, 635)) // token='assert' && (expression_var = expression_rule(p)) // expression && @@ -22250,7 +22301,7 @@ invalid_assert_stmt_rule(Parser *p) expr_ty a; expr_ty b; if ( - (_keyword = _PyPegen_expect_token(p, 634)) // token='assert' + (_keyword = _PyPegen_expect_token(p, 635)) // token='assert' && (a = expression_rule(p)) // expression && @@ -22285,7 +22336,7 @@ invalid_assert_stmt_rule(Parser *p) expr_ty b; expr_ty expression_var; if ( - (_keyword = _PyPegen_expect_token(p, 634)) // token='assert' + (_keyword = _PyPegen_expect_token(p, 635)) // token='assert' && (expression_var = expression_rule(p)) // expression && @@ -23729,7 +23780,7 @@ invalid_with_item_rule(Parser *p) if ( (expression_var = expression_rule(p)) // expression && - (_keyword = _PyPegen_expect_token(p, 689)) // token='as' + (_keyword = _PyPegen_expect_token(p, 690)) // token='as' && (a = expression_rule(p)) // expression && @@ -23779,13 +23830,13 @@ invalid_for_if_clause_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings void *_tmp_135_var; if ( - (_opt_var = _PyPegen_expect_token(p, 707), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 708), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 703)) // token='for' + (_keyword = _PyPegen_expect_token(p, 704)) // token='for' && (_tmp_135_var = _tmp_135_rule(p)) // bitwise_or ((',' bitwise_or))* ','? && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 704) // token='in' + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 705) // token='in' ) { D(fprintf(stderr, "%*c+ invalid_for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'for' (bitwise_or ((',' bitwise_or))* ','?) 
!'in'")); @@ -23831,9 +23882,9 @@ invalid_for_target_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings expr_ty a; if ( - (_opt_var = _PyPegen_expect_token(p, 707), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 708), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 703)) // token='for' + (_keyword = _PyPegen_expect_token(p, 704)) // token='for' && (a = star_expressions_rule(p)) // star_expressions ) @@ -23963,11 +24014,11 @@ invalid_import_rule(Parser *p) Token * a; expr_ty dotted_name_var; if ( - (a = _PyPegen_expect_token(p, 643)) // token='import' + (a = _PyPegen_expect_token(p, 644)) // token='import' && (_gather_137_var = _gather_137_rule(p)) // ','.dotted_name+ && - (_keyword = _PyPegen_expect_token(p, 642)) // token='from' + (_keyword = _PyPegen_expect_token(p, 643)) // token='from' && (dotted_name_var = dotted_name_rule(p)) // dotted_name ) @@ -23994,7 +24045,7 @@ invalid_import_rule(Parser *p) Token * _keyword; Token * token; if ( - (_keyword = _PyPegen_expect_token(p, 643)) // token='import' + (_keyword = _PyPegen_expect_token(p, 644)) // token='import' && (token = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -24043,7 +24094,7 @@ invalid_dotted_as_name_rule(Parser *p) if ( (dotted_name_var = dotted_name_rule(p)) // dotted_name && - (_keyword = _PyPegen_expect_token(p, 689)) // token='as' + (_keyword = _PyPegen_expect_token(p, 690)) // token='as' && _PyPegen_lookahead(0, _tmp_138_rule, p) && @@ -24094,7 +24145,7 @@ invalid_import_from_as_name_rule(Parser *p) if ( (name_var = _PyPegen_name_token(p)) // NAME && - (_keyword = _PyPegen_expect_token(p, 689)) // token='as' + (_keyword = _PyPegen_expect_token(p, 690)) // token='as' && _PyPegen_lookahead(0, _tmp_138_rule, p) && @@ -24220,9 +24271,9 @@ invalid_with_stmt_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 707), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 708), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 656)) // token='with' + (_keyword = _PyPegen_expect_token(p, 657)) // token='with' && (_gather_140_var = _gather_140_rule(p)) // ','.(expression ['as' star_target])+ && @@ -24258,9 +24309,9 @@ invalid_with_stmt_rule(Parser *p) UNUSED(_opt_var_1); // Silence compiler warnings Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 707), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 708), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 656)) // token='with' + (_keyword = _PyPegen_expect_token(p, 657)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -24320,9 +24371,9 @@ invalid_with_stmt_indent_rule(Parser *p) Token * a; Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 707), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 708), !p->error_indicator) // 'async'? && - (a = _PyPegen_expect_token(p, 656)) // token='with' + (a = _PyPegen_expect_token(p, 657)) // token='with' && (_gather_140_var = _gather_140_rule(p)) // ','.(expression ['as' star_target])+ && @@ -24363,9 +24414,9 @@ invalid_with_stmt_indent_rule(Parser *p) Token * a; Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 707), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 708), !p->error_indicator) // 'async'? 
&& - (a = _PyPegen_expect_token(p, 656)) // token='with' + (a = _PyPegen_expect_token(p, 657)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -24428,7 +24479,7 @@ invalid_try_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 665)) // token='try' + (a = _PyPegen_expect_token(p, 666)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -24460,7 +24511,7 @@ invalid_try_stmt_rule(Parser *p) Token * _literal; asdl_stmt_seq* block_var; if ( - (_keyword = _PyPegen_expect_token(p, 665)) // token='try' + (_keyword = _PyPegen_expect_token(p, 666)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -24499,7 +24550,7 @@ invalid_try_stmt_rule(Parser *p) Token * b; expr_ty expression_var; if ( - (_keyword = _PyPegen_expect_token(p, 665)) // token='try' + (_keyword = _PyPegen_expect_token(p, 666)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -24507,7 +24558,7 @@ invalid_try_stmt_rule(Parser *p) && (_loop1_36_var = _loop1_36_rule(p)) // except_block+ && - (a = _PyPegen_expect_token(p, 686)) // token='except' + (a = _PyPegen_expect_token(p, 687)) // token='except' && (b = _PyPegen_expect_token(p, 16)) // token='*' && @@ -24546,7 +24597,7 @@ invalid_try_stmt_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings Token * a; if ( - (_keyword = _PyPegen_expect_token(p, 665)) // token='try' + (_keyword = _PyPegen_expect_token(p, 666)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -24554,7 +24605,7 @@ invalid_try_stmt_rule(Parser *p) && (_loop1_37_var = _loop1_37_rule(p)) // except_star_block+ && - (a = _PyPegen_expect_token(p, 686)) // token='except' + (a = _PyPegen_expect_token(p, 687)) // token='except' && (_opt_var = _tmp_145_rule(p), !p->error_indicator) // [expression ['as' NAME]] && @@ -24611,7 +24662,7 @@ invalid_except_stmt_rule(Parser *p) expr_ty expressions_var; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 686)) // token='except' + (_keyword = _PyPegen_expect_token(p, 687)) // token='except' && (a = expression_rule(p)) // expression && @@ -24619,7 +24670,7 @@ invalid_except_stmt_rule(Parser *p) && (expressions_var = expressions_rule(p)) // expressions && - (_keyword_1 = _PyPegen_expect_token(p, 689)) // token='as' + (_keyword_1 = _PyPegen_expect_token(p, 690)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -24651,7 +24702,7 @@ invalid_except_stmt_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 686)) // token='except' + (a = _PyPegen_expect_token(p, 687)) // token='except' && (expression_var = expression_rule(p)) // expression && @@ -24682,7 +24733,7 @@ invalid_except_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 686)) // token='except' + (a = _PyPegen_expect_token(p, 687)) // token='except' && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -24713,11 +24764,11 @@ invalid_except_stmt_rule(Parser *p) asdl_stmt_seq* block_var; expr_ty expression_var; if ( - (_keyword = _PyPegen_expect_token(p, 686)) // token='except' + (_keyword = _PyPegen_expect_token(p, 687)) // token='except' && (expression_var = expression_rule(p)) // expression && - (_keyword_1 = _PyPegen_expect_token(p, 689)) // token='as' + (_keyword_1 = _PyPegen_expect_token(p, 690)) // token='as' && (a = expression_rule(p)) // expression && @@ -24777,7 +24828,7 @@ 
invalid_except_star_stmt_rule(Parser *p) expr_ty expressions_var; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 686)) // token='except' + (_keyword = _PyPegen_expect_token(p, 687)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -24787,7 +24838,7 @@ invalid_except_star_stmt_rule(Parser *p) && (expressions_var = expressions_rule(p)) // expressions && - (_keyword_1 = _PyPegen_expect_token(p, 689)) // token='as' + (_keyword_1 = _PyPegen_expect_token(p, 690)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -24820,7 +24871,7 @@ invalid_except_star_stmt_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 686)) // token='except' + (a = _PyPegen_expect_token(p, 687)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -24854,7 +24905,7 @@ invalid_except_star_stmt_rule(Parser *p) void *_tmp_146_var; Token * a; if ( - (a = _PyPegen_expect_token(p, 686)) // token='except' + (a = _PyPegen_expect_token(p, 687)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -24888,13 +24939,13 @@ invalid_except_star_stmt_rule(Parser *p) asdl_stmt_seq* block_var; expr_ty expression_var; if ( - (_keyword = _PyPegen_expect_token(p, 686)) // token='except' + (_keyword = _PyPegen_expect_token(p, 687)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (expression_var = expression_rule(p)) // expression && - (_keyword_1 = _PyPegen_expect_token(p, 689)) // token='as' + (_keyword_1 = _PyPegen_expect_token(p, 690)) // token='as' && (a = expression_rule(p)) // expression && @@ -24945,7 +24996,7 @@ invalid_finally_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 682)) // token='finally' + (a = _PyPegen_expect_token(p, 683)) // token='finally' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -25001,7 +25052,7 @@ invalid_except_stmt_indent_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 686)) // token='except' + (a = _PyPegen_expect_token(p, 687)) // token='except' && (expression_var = expression_rule(p)) // expression && @@ -25037,7 +25088,7 @@ invalid_except_stmt_indent_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 686)) // token='except' + (a = _PyPegen_expect_token(p, 687)) // token='except' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -25093,7 +25144,7 @@ invalid_except_star_stmt_indent_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 686)) // token='except' + (a = _PyPegen_expect_token(p, 687)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -25332,7 +25383,7 @@ invalid_as_pattern_rule(Parser *p) if ( (or_pattern_var = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 689)) // token='as' + (_keyword = _PyPegen_expect_token(p, 690)) // token='as' && (a = _PyPegen_expect_soft_keyword(p, "_")) // soft_keyword='"_"' ) @@ -25362,7 +25413,7 @@ invalid_as_pattern_rule(Parser *p) if ( (or_pattern_var = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 689)) // token='as' + (_keyword = _PyPegen_expect_token(p, 690)) // token='as' && (a = expression_rule(p)) // expression ) @@ -25514,7 +25565,7 @@ invalid_if_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 691)) // 
token='if' + (_keyword = _PyPegen_expect_token(p, 692)) // token='if' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -25545,7 +25596,7 @@ invalid_if_stmt_rule(Parser *p) expr_ty a_1; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 691)) // token='if' + (a = _PyPegen_expect_token(p, 692)) // token='if' && (a_1 = named_expression_rule(p)) // named_expression && @@ -25600,7 +25651,7 @@ invalid_elif_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 696)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 697)) // token='elif' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -25631,7 +25682,7 @@ invalid_elif_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 696)) // token='elif' + (a = _PyPegen_expect_token(p, 697)) // token='elif' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -25684,7 +25735,7 @@ invalid_else_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 695)) // token='else' + (a = _PyPegen_expect_token(p, 696)) // token='else' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -25717,13 +25768,13 @@ invalid_else_stmt_rule(Parser *p) Token * _literal; asdl_stmt_seq* block_var; if ( - (_keyword = _PyPegen_expect_token(p, 695)) // token='else' + (_keyword = _PyPegen_expect_token(p, 696)) // token='else' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (block_var = block_rule(p)) // block && - (_keyword_1 = _PyPegen_expect_token(p, 696)) // token='elif' + (_keyword_1 = _PyPegen_expect_token(p, 697)) // token='elif' ) { D(fprintf(stderr, "%*c+ invalid_else_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else' ':' block 'elif'")); @@ -25770,7 +25821,7 @@ invalid_while_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 698)) // token='while' + (_keyword = _PyPegen_expect_token(p, 699)) // token='while' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -25801,7 +25852,7 @@ invalid_while_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 698)) // token='while' + (a = _PyPegen_expect_token(p, 699)) // token='while' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -25860,13 +25911,13 @@ invalid_for_stmt_rule(Parser *p) expr_ty star_expressions_var; expr_ty star_targets_var; if ( - (_opt_var = _PyPegen_expect_token(p, 707), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 708), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 703)) // token='for' + (_keyword = _PyPegen_expect_token(p, 704)) // token='for' && (star_targets_var = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 704)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 705)) // token='in' && (star_expressions_var = star_expressions_rule(p)) // star_expressions && @@ -25901,13 +25952,13 @@ invalid_for_stmt_rule(Parser *p) expr_ty star_expressions_var; expr_ty star_targets_var; if ( - (_opt_var = _PyPegen_expect_token(p, 707), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 708), !p->error_indicator) // 'async'? 
&& - (a = _PyPegen_expect_token(p, 703)) // token='for' + (a = _PyPegen_expect_token(p, 704)) // token='for' && (star_targets_var = star_targets_rule(p)) // star_targets && - (_keyword = _PyPegen_expect_token(p, 704)) // token='in' + (_keyword = _PyPegen_expect_token(p, 705)) // token='in' && (star_expressions_var = star_expressions_rule(p)) // star_expressions && @@ -25973,9 +26024,9 @@ invalid_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 707), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 708), !p->error_indicator) // 'async'? && - (a = _PyPegen_expect_token(p, 708)) // token='def' + (a = _PyPegen_expect_token(p, 709)) // token='def' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -26032,9 +26083,9 @@ invalid_def_raw_rule(Parser *p) asdl_stmt_seq* block_var; expr_ty name_var; if ( - (_opt_var = _PyPegen_expect_token(p, 707), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 708), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 708)) // token='def' + (_keyword = _PyPegen_expect_token(p, 709)) // token='def' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -26098,7 +26149,7 @@ invalid_class_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 710)) // token='class' + (_keyword = _PyPegen_expect_token(p, 711)) // token='class' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -26137,7 +26188,7 @@ invalid_class_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 710)) // token='class' + (a = _PyPegen_expect_token(p, 711)) // token='class' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -27472,7 +27523,7 @@ invalid_arithmetic_rule(Parser *p) && (_tmp_154_var = _tmp_154_rule(p)) // '+' | '-' | '*' | '/' | '%' | '//' | '@' && - (a = _PyPegen_expect_token(p, 712)) // token='not' + (a = _PyPegen_expect_token(p, 713)) // token='not' && (b = inversion_rule(p)) // inversion ) @@ -27521,7 +27572,7 @@ invalid_factor_rule(Parser *p) if ( (_tmp_155_var = _tmp_155_rule(p)) // '+' | '-' | '~' && - (a = _PyPegen_expect_token(p, 712)) // token='not' + (a = _PyPegen_expect_token(p, 713)) // token='not' && (b = factor_rule(p)) // factor ) @@ -27847,7 +27898,7 @@ _gather_4_rule(Parser *p) return _res; } -// _tmp_5: 'import' | 'from' +// _tmp_5: 'import' | 'from' | "lazy" static void * _tmp_5_rule(Parser *p) { @@ -27868,7 +27919,7 @@ _tmp_5_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_5[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 643)) // token='import' + (_keyword = _PyPegen_expect_token(p, 644)) // token='import' ) { D(fprintf(stderr, "%*c+ _tmp_5[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import'")); @@ -27887,7 +27938,7 @@ _tmp_5_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_5[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 642)) // token='from' + (_keyword = _PyPegen_expect_token(p, 643)) // token='from' ) { D(fprintf(stderr, "%*c+ _tmp_5[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from'")); @@ -27898,6 +27949,25 @@ _tmp_5_rule(Parser *p) D(fprintf(stderr, "%*c%s _tmp_5[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'from'")); } + { // "lazy" + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_5[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "\"lazy\"")); + expr_ty _keyword; + if ( + (_keyword = _PyPegen_expect_soft_keyword(p, "lazy")) // soft_keyword='"lazy"' + ) + { + D(fprintf(stderr, "%*c+ _tmp_5[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "\"lazy\"")); + _res = _keyword; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_5[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "\"lazy\"")); + } _res = NULL; done: p->level--; @@ -27925,7 +27995,7 @@ _tmp_6_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 708)) // token='def' + (_keyword = _PyPegen_expect_token(p, 709)) // token='def' ) { D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'")); @@ -27963,7 +28033,7 @@ _tmp_6_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 707)) // token='async' + (_keyword = _PyPegen_expect_token(p, 708)) // token='async' ) { D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'")); @@ -28001,7 +28071,7 @@ _tmp_7_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 710)) // token='class' + (_keyword = _PyPegen_expect_token(p, 711)) // token='class' ) { D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'")); @@ -28058,7 +28128,7 @@ _tmp_8_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='with' + (_keyword = _PyPegen_expect_token(p, 657)) // token='with' ) { D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'")); @@ -28077,7 +28147,7 @@ _tmp_8_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 707)) // token='async' + (_keyword = _PyPegen_expect_token(p, 708)) // token='async' ) { D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'")); @@ -28115,7 +28185,7 @@ _tmp_9_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 703)) // token='for' + (_keyword = _PyPegen_expect_token(p, 704)) // token='for' ) { D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for'")); @@ -28134,7 +28204,7 @@ _tmp_9_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 707)) // token='async' + (_keyword = _PyPegen_expect_token(p, 708)) // token='async' ) { D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'")); @@ -28835,7 +28905,7 @@ _tmp_21_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 689)) // token='as' + (_keyword = _PyPegen_expect_token(p, 690)) // 
token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -34422,7 +34492,7 @@ _tmp_111_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 623)) // token='True' + (_keyword = _PyPegen_expect_token(p, 624)) // token='True' ) { D(fprintf(stderr, "%*c+ _tmp_111[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); @@ -34441,7 +34511,7 @@ _tmp_111_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 625)) // token='False' + (_keyword = _PyPegen_expect_token(p, 626)) // token='False' ) { D(fprintf(stderr, "%*c+ _tmp_111[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); @@ -34460,7 +34530,7 @@ _tmp_111_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 624)) // token='None' + (_keyword = _PyPegen_expect_token(p, 625)) // token='None' ) { D(fprintf(stderr, "%*c+ _tmp_111[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); @@ -34671,7 +34741,7 @@ _tmp_115_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 695)) // token='else' + (_keyword = _PyPegen_expect_token(p, 696)) // token='else' ) { D(fprintf(stderr, "%*c+ _tmp_115[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'")); @@ -34918,7 +34988,7 @@ _tmp_118_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 623)) // token='True' + (_keyword = _PyPegen_expect_token(p, 624)) // token='True' ) { D(fprintf(stderr, "%*c+ _tmp_118[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); @@ -34937,7 +35007,7 @@ _tmp_118_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 624)) // token='None' + (_keyword = _PyPegen_expect_token(p, 625)) // token='None' ) { D(fprintf(stderr, "%*c+ _tmp_118[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); @@ -34956,7 +35026,7 @@ _tmp_118_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 625)) // token='False' + (_keyword = _PyPegen_expect_token(p, 626)) // token='False' ) { D(fprintf(stderr, "%*c+ _tmp_118[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); @@ -36410,7 +36480,7 @@ _tmp_143_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 686)) // token='except' + (_keyword = _PyPegen_expect_token(p, 687)) // token='except' ) { D(fprintf(stderr, "%*c+ _tmp_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'")); @@ -36429,7 +36499,7 @@ _tmp_143_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 682)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 683)) // token='finally' ) { D(fprintf(stderr, "%*c+ _tmp_143[%d-%d]: %s succeeded!\n", 
p->level, ' ', _mark, p->mark, "'finally'")); @@ -37484,7 +37554,7 @@ _tmp_160_rule(Parser *p) Token * _keyword; expr_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 589)) // token='or' + (_keyword = _PyPegen_expect_token(p, 590)) // token='or' && (c = conjunction_rule(p)) // conjunction ) @@ -37530,7 +37600,7 @@ _tmp_161_rule(Parser *p) Token * _keyword; expr_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 590)) // token='and' + (_keyword = _PyPegen_expect_token(p, 591)) // token='and' && (c = inversion_rule(p)) // inversion ) @@ -37633,7 +37703,7 @@ _tmp_163_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 691)) // token='if' + (_keyword = _PyPegen_expect_token(p, 692)) // token='if' && (z = disjunction_rule(p)) // disjunction ) @@ -38291,7 +38361,7 @@ _tmp_176_rule(Parser *p) Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 689)) // token='as' + (_keyword = _PyPegen_expect_token(p, 690)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) diff --git a/Parser/pegen.h b/Parser/pegen.h index 6b49b3537a0..0473db4ff68 100644 --- a/Parser/pegen.h +++ b/Parser/pegen.h @@ -366,7 +366,7 @@ void *_PyPegen_arguments_parsing_error(Parser *, expr_ty); expr_ty _PyPegen_get_last_comprehension_item(comprehension_ty comprehension); void *_PyPegen_nonparen_genexp_in_call(Parser *p, expr_ty args, asdl_comprehension_seq *comprehensions); stmt_ty _PyPegen_checked_future_import(Parser *p, identifier module, asdl_alias_seq *, - int , int, int , int , int , PyArena *); + int , int, int , int , int , int, PyArena *); asdl_stmt_seq* _PyPegen_register_stmts(Parser *p, asdl_stmt_seq* stmts); stmt_ty _PyPegen_register_stmt(Parser *p, stmt_ty s); diff --git a/Programs/test_frozenmain.h b/Programs/test_frozenmain.h index dbeedb7ffe0..f808544045e 100644 --- a/Programs/test_frozenmain.h +++ b/Programs/test_frozenmain.h @@ -2,7 +2,7 @@ unsigned char M_test_frozenmain[] = { 227,0,0,0,0,0,0,0,0,0,0,0,0,9,0,0, 0,0,0,0,0,243,184,0,0,0,128,0,94,0,82,1, - 73,0,116,0,94,0,82,1,73,1,116,1,93,2,33,0, + 73,0,116,0,94,0,82,1,73,4,116,1,93,2,33,0, 82,2,52,1,0,0,0,0,0,0,31,0,93,2,33,0, 82,3,93,0,80,6,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,52,2,0,0,0,0,0,0, diff --git a/Python/Python-ast.c b/Python/Python-ast.c index 660bc598a48..2bb9003fa29 100644 --- a/Python/Python-ast.c +++ b/Python/Python-ast.c @@ -222,6 +222,7 @@ void _PyAST_Fini(PyInterpreterState *interp) Py_CLEAR(state->id); Py_CLEAR(state->ifs); Py_CLEAR(state->is_async); + Py_CLEAR(state->is_lazy); Py_CLEAR(state->items); Py_CLEAR(state->iter); Py_CLEAR(state->key); @@ -327,6 +328,7 @@ static int init_identifiers(struct ast_state *state) if ((state->id = PyUnicode_InternFromString("id")) == NULL) return -1; if ((state->ifs = PyUnicode_InternFromString("ifs")) == NULL) return -1; if ((state->is_async = PyUnicode_InternFromString("is_async")) == NULL) return -1; + if ((state->is_lazy = PyUnicode_InternFromString("is_lazy")) == NULL) return -1; if ((state->items = PyUnicode_InternFromString("items")) == NULL) return -1; if ((state->iter = PyUnicode_InternFromString("iter")) == NULL) return -1; if ((state->key = PyUnicode_InternFromString("key")) == NULL) return -1; @@ -527,11 +529,13 @@ static const char * const Assert_fields[]={ }; static const char * const Import_fields[]={ "names", + "is_lazy", }; static const char * const ImportFrom_fields[]={ "module", "names", "level", + "is_lazy", }; static const char * const Global_fields[]={ "names", @@ -2254,6 +2258,21 @@ 
add_ast_annotations(struct ast_state *state) return 0; } } + { + PyObject *type = (PyObject *)&PyLong_Type; + type = _Py_union_type_or(type, Py_None); + cond = type != NULL; + if (!cond) { + Py_DECREF(Import_annotations); + return 0; + } + cond = PyDict_SetItemString(Import_annotations, "is_lazy", type) == 0; + Py_DECREF(type); + if (!cond) { + Py_DECREF(Import_annotations); + return 0; + } + } cond = PyObject_SetAttrString(state->Import_type, "_field_types", Import_annotations) == 0; if (!cond) { @@ -2315,6 +2334,22 @@ add_ast_annotations(struct ast_state *state) return 0; } } + { + PyObject *type = (PyObject *)&PyLong_Type; + type = _Py_union_type_or(type, Py_None); + cond = type != NULL; + if (!cond) { + Py_DECREF(ImportFrom_annotations); + return 0; + } + cond = PyDict_SetItemString(ImportFrom_annotations, "is_lazy", type) == + 0; + Py_DECREF(type); + if (!cond) { + Py_DECREF(ImportFrom_annotations); + return 0; + } + } cond = PyObject_SetAttrString(state->ImportFrom_type, "_field_types", ImportFrom_annotations) == 0; if (!cond) { @@ -6218,8 +6253,8 @@ init_types(void *arg) " | Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)\n" " | TryStar(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)\n" " | Assert(expr test, expr? msg)\n" - " | Import(alias* names)\n" - " | ImportFrom(identifier? module, alias* names, int? level)\n" + " | Import(alias* names, int? is_lazy)\n" + " | ImportFrom(identifier? module, alias* names, int? level, int? is_lazy)\n" " | Global(identifier* names)\n" " | Nonlocal(identifier* names)\n" " | Expr(expr value)\n" @@ -6348,17 +6383,21 @@ init_types(void *arg) if (PyObject_SetAttr(state->Assert_type, state->msg, Py_None) == -1) return -1; state->Import_type = make_type(state, "Import", state->stmt_type, - Import_fields, 1, - "Import(alias* names)"); + Import_fields, 2, + "Import(alias* names, int? is_lazy)"); if (!state->Import_type) return -1; + if (PyObject_SetAttr(state->Import_type, state->is_lazy, Py_None) == -1) + return -1; state->ImportFrom_type = make_type(state, "ImportFrom", state->stmt_type, - ImportFrom_fields, 3, - "ImportFrom(identifier? module, alias* names, int? level)"); + ImportFrom_fields, 4, + "ImportFrom(identifier? module, alias* names, int? level, int? 
is_lazy)"); if (!state->ImportFrom_type) return -1; if (PyObject_SetAttr(state->ImportFrom_type, state->module, Py_None) == -1) return -1; if (PyObject_SetAttr(state->ImportFrom_type, state->level, Py_None) == -1) return -1; + if (PyObject_SetAttr(state->ImportFrom_type, state->is_lazy, Py_None) == -1) + return -1; state->Global_type = make_type(state, "Global", state->stmt_type, Global_fields, 1, "Global(identifier* names)"); @@ -7598,8 +7637,8 @@ _PyAST_Assert(expr_ty test, expr_ty msg, int lineno, int col_offset, int } stmt_ty -_PyAST_Import(asdl_alias_seq * names, int lineno, int col_offset, int - end_lineno, int end_col_offset, PyArena *arena) +_PyAST_Import(asdl_alias_seq * names, int is_lazy, int lineno, int col_offset, + int end_lineno, int end_col_offset, PyArena *arena) { stmt_ty p; p = (stmt_ty)_PyArena_Malloc(arena, sizeof(*p)); @@ -7607,6 +7646,7 @@ _PyAST_Import(asdl_alias_seq * names, int lineno, int col_offset, int return NULL; p->kind = Import_kind; p->v.Import.names = names; + p->v.Import.is_lazy = is_lazy; p->lineno = lineno; p->col_offset = col_offset; p->end_lineno = end_lineno; @@ -7616,8 +7656,8 @@ _PyAST_Import(asdl_alias_seq * names, int lineno, int col_offset, int stmt_ty _PyAST_ImportFrom(identifier module, asdl_alias_seq * names, int level, int - lineno, int col_offset, int end_lineno, int end_col_offset, - PyArena *arena) + is_lazy, int lineno, int col_offset, int end_lineno, int + end_col_offset, PyArena *arena) { stmt_ty p; p = (stmt_ty)_PyArena_Malloc(arena, sizeof(*p)); @@ -7627,6 +7667,7 @@ _PyAST_ImportFrom(identifier module, asdl_alias_seq * names, int level, int p->v.ImportFrom.module = module; p->v.ImportFrom.names = names; p->v.ImportFrom.level = level; + p->v.ImportFrom.is_lazy = is_lazy; p->lineno = lineno; p->col_offset = col_offset; p->end_lineno = end_lineno; @@ -9465,6 +9506,11 @@ ast2obj_stmt(struct ast_state *state, void* _o) if (PyObject_SetAttr(result, state->names, value) == -1) goto failed; Py_DECREF(value); + value = ast2obj_int(state, o->v.Import.is_lazy); + if (!value) goto failed; + if (PyObject_SetAttr(result, state->is_lazy, value) == -1) + goto failed; + Py_DECREF(value); break; case ImportFrom_kind: tp = (PyTypeObject *)state->ImportFrom_type; @@ -9486,6 +9532,11 @@ ast2obj_stmt(struct ast_state *state, void* _o) if (PyObject_SetAttr(result, state->level, value) == -1) goto failed; Py_DECREF(value); + value = ast2obj_int(state, o->v.ImportFrom.is_lazy); + if (!value) goto failed; + if (PyObject_SetAttr(result, state->is_lazy, value) == -1) + goto failed; + Py_DECREF(value); break; case Global_kind: tp = (PyTypeObject *)state->Global_type; @@ -13481,6 +13532,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* } if (isinstance) { asdl_alias_seq* names; + int is_lazy; if (PyObject_GetOptionalAttr(obj, state->names, &tmp) < 0) { return -1; @@ -13520,7 +13572,24 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* } Py_CLEAR(tmp); } - *out = _PyAST_Import(names, lineno, col_offset, end_lineno, + if (PyObject_GetOptionalAttr(obj, state->is_lazy, &tmp) < 0) { + return -1; + } + if (tmp == NULL || tmp == Py_None) { + Py_CLEAR(tmp); + is_lazy = 0; + } + else { + int res; + if (_Py_EnterRecursiveCall(" while traversing 'Import' node")) { + goto failed; + } + res = obj2ast_int(state, tmp, &is_lazy, arena); + _Py_LeaveRecursiveCall(); + if (res != 0) goto failed; + Py_CLEAR(tmp); + } + *out = _PyAST_Import(names, is_lazy, lineno, col_offset, end_lineno, end_col_offset, arena); if (*out == 
NULL) goto failed; return 0; @@ -13534,6 +13603,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* identifier module; asdl_alias_seq* names; int level; + int is_lazy; if (PyObject_GetOptionalAttr(obj, state->module, &tmp) < 0) { return -1; @@ -13607,8 +13677,25 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (res != 0) goto failed; Py_CLEAR(tmp); } - *out = _PyAST_ImportFrom(module, names, level, lineno, col_offset, - end_lineno, end_col_offset, arena); + if (PyObject_GetOptionalAttr(obj, state->is_lazy, &tmp) < 0) { + return -1; + } + if (tmp == NULL || tmp == Py_None) { + Py_CLEAR(tmp); + is_lazy = 0; + } + else { + int res; + if (_Py_EnterRecursiveCall(" while traversing 'ImportFrom' node")) { + goto failed; + } + res = obj2ast_int(state, tmp, &is_lazy, arena); + _Py_LeaveRecursiveCall(); + if (res != 0) goto failed; + Py_CLEAR(tmp); + } + *out = _PyAST_ImportFrom(module, names, level, is_lazy, lineno, + col_offset, end_lineno, end_col_offset, arena); if (*out == NULL) goto failed; return 0; } diff --git a/Python/bytecodes.c b/Python/bytecodes.c index f9f14322df0..d176e28ddde 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -1758,6 +1758,13 @@ dummy_func( } ERROR_NO_POP(); } + + if (PyLazyImport_CheckExact(v_o)) { + PyObject *l_v = _PyImport_LoadLazyImportTstate(tstate, v_o); + Py_DECREF(v_o); + v_o = l_v; + ERROR_IF(v_o == NULL); + } } else { /* Slow-path if globals or builtins is not a dict */ @@ -1775,6 +1782,12 @@ dummy_func( ERROR_IF(true); } } + if (PyLazyImport_CheckExact(v_o)) { + PyObject *l_v = _PyImport_LoadLazyImportTstate(tstate, v_o); + Py_DECREF(v_o); + v_o = l_v; + ERROR_IF(v_o == NULL); + } } } v = PyStackRef_FromPyObjectSteal(v_o); @@ -1784,6 +1797,13 @@ dummy_func( PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); PyObject *v_o = _PyEval_LoadName(tstate, frame, name); ERROR_IF(v_o == NULL); + if (PyLazyImport_CheckExact(v_o)) { + PyObject *l_v = _PyImport_LoadLazyImportTstate(tstate, v_o); + Py_DECREF(v_o); + v_o = l_v; + ERROR_IF(v_o == NULL); + } + v = PyStackRef_FromPyObjectSteal(v_o); } @@ -1809,7 +1829,18 @@ dummy_func( op(_LOAD_GLOBAL, ( -- res[1])) { PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1); _PyEval_LoadGlobalStackRef(GLOBALS(), BUILTINS(), name, res); + ERROR_IF(PyStackRef_IsNull(*res)); + + PyObject *res_o = PyStackRef_AsPyObjectBorrow(*res); + if (PyLazyImport_CheckExact(res_o)) { + PyObject *l_v = _PyImport_LoadLazyImportTstate(tstate, res_o); + Py_DECREF(res_o); + res_o = l_v; + PyStackRef_CLOSE(res[0]); + ERROR_IF(res_o == NULL); + *res = PyStackRef_FromPyObjectSteal(res_o); + } } op(_PUSH_NULL_CONDITIONAL, ( -- null[oparg & 1])) { @@ -2914,10 +2945,18 @@ dummy_func( } inst(IMPORT_NAME, (level, fromlist -- res)) { - PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); - PyObject *res_o = _PyEval_ImportName(tstate, frame, name, - PyStackRef_AsPyObjectBorrow(fromlist), - PyStackRef_AsPyObjectBorrow(level)); + PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2); + PyObject *res_o; + if (oparg & 0x01) { + res_o = _PyEval_LazyImportName(tstate, BUILTINS(), GLOBALS(), name, + PyStackRef_AsPyObjectBorrow(fromlist), + PyStackRef_AsPyObjectBorrow(level)); + + } else { + res_o = _PyEval_ImportName(tstate, BUILTINS(), GLOBALS(), LOCALS(), name, + PyStackRef_AsPyObjectBorrow(fromlist), + PyStackRef_AsPyObjectBorrow(level)); + } DECREF_INPUTS(); ERROR_IF(res_o == NULL); res = PyStackRef_FromPyObjectSteal(res_o); @@ -2925,7 +2964,13 @@ dummy_func( inst(IMPORT_FROM, (from -- from, res)) { 
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); - PyObject *res_o = _PyEval_ImportFrom(tstate, PyStackRef_AsPyObjectBorrow(from), name); + PyObject *res_o; + if (PyLazyImport_CheckExact(PyStackRef_AsPyObjectBorrow(from))) { + res_o = _PyEval_LazyImportFrom(tstate, PyStackRef_AsPyObjectBorrow(from), name); + } else { + res_o = _PyEval_ImportFrom(tstate, PyStackRef_AsPyObjectBorrow(from), name); + } + ERROR_IF(res_o == NULL); res = PyStackRef_FromPyObjectSteal(res_o); } diff --git a/Python/ceval.c b/Python/ceval.c index 0ccaacaf3ed..0ac5c18fbf2 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -22,6 +22,7 @@ #include "pycore_interpolation.h" // _PyInterpolation_Build() #include "pycore_intrinsics.h" #include "pycore_jit.h" +#include "pycore_lazyimportobject.h" #include "pycore_list.h" // _PyList_GetItemRef() #include "pycore_long.h" // _PyLong_GetZero() #include "pycore_moduleobject.h" // PyModuleObject @@ -2986,11 +2987,11 @@ _PyEval_SliceIndexNotNone(PyObject *v, Py_ssize_t *pi) } PyObject * -_PyEval_ImportName(PyThreadState *tstate, _PyInterpreterFrame *frame, +_PyEval_ImportName(PyThreadState *tstate, PyObject *builtins, PyObject *globals, PyObject *locals, PyObject *name, PyObject *fromlist, PyObject *level) { PyObject *import_func; - if (PyMapping_GetOptionalItem(frame->f_builtins, &_Py_ID(__import__), &import_func) < 0) { + if (PyMapping_GetOptionalItem(builtins, &_Py_ID(__import__), &import_func) < 0) { return NULL; } if (import_func == NULL) { @@ -2998,7 +2999,6 @@ _PyEval_ImportName(PyThreadState *tstate, _PyInterpreterFrame *frame, return NULL; } - PyObject *locals = frame->f_locals; if (locals == NULL) { locals = Py_None; } @@ -3012,18 +3012,50 @@ _PyEval_ImportName(PyThreadState *tstate, _PyInterpreterFrame *frame, } return PyImport_ImportModuleLevelObject( name, - frame->f_globals, + globals, locals, fromlist, ilevel); } - PyObject* args[5] = {name, frame->f_globals, locals, fromlist, level}; + PyObject* args[5] = {name, globals, locals, fromlist, level}; PyObject *res = PyObject_Vectorcall(import_func, args, 5, NULL); Py_DECREF(import_func); return res; } + +PyObject * +_PyEval_LazyImportName(PyThreadState *tstate, PyObject *builtins, PyObject *globals, + PyObject *name, PyObject *fromlist, PyObject *level) +{ + PyObject *res = NULL; + PyObject *abs_name = NULL; + int ilevel = PyLong_AsInt(level); + if (ilevel == -1 && PyErr_Occurred()) { + goto error; + } + if (ilevel > 0) { + abs_name = _PyImport_ResolveName(tstate, name, globals, ilevel); + if (abs_name == NULL) { + goto error; + } + } else { /* ilevel == 0 */ + if (PyUnicode_GET_LENGTH(name) == 0) { + PyErr_SetString(PyExc_ValueError, "Empty module name"); + goto error; + } + abs_name = name; + Py_INCREF(abs_name); + } + + // TODO: check sys.modules for module + res = _PyLazyImport_New(builtins, abs_name, fromlist); +error: + Py_XDECREF(abs_name); + return res; +} + PyObject * _PyEval_ImportFrom(PyThreadState *tstate, PyObject *v, PyObject *name) { @@ -3192,6 +3224,33 @@ _PyEval_ImportFrom(PyThreadState *tstate, PyObject *v, PyObject *name) return NULL; } +PyObject * +_PyEval_LazyImportFrom(PyThreadState *tstate, PyObject *v, PyObject *name) +{ + assert(PyLazyImport_CheckExact(v)); + assert(name && PyUnicode_Check(name)); + PyObject *ret; + PyLazyImportObject *d = (PyLazyImportObject *)v; + if (d->lz_attr != NULL) { + if (PyUnicode_Check(d->lz_attr)) { + PyObject *from = PyUnicode_FromFormat("%U.%U", d->lz_from, d->lz_attr); + ret = _PyLazyImport_New(d->lz_builtins, from, name); + Py_DECREF(from); + return ret; + } + } 
else { + Py_ssize_t dot = PyUnicode_FindChar(d->lz_from, '.', 0, PyUnicode_GET_LENGTH(d->lz_from), 1); + if (dot >= 0) { + PyObject *from = PyUnicode_Substring(d->lz_from, 0, dot); + ret = _PyLazyImport_New(d->lz_builtins, from, name); + Py_DECREF(from); + return ret; + } + } + ret = _PyLazyImport_New(d->lz_builtins, d->lz_from, name); + return ret; +} + #define CANNOT_CATCH_MSG "catching classes that do not inherit from "\ "BaseException is not allowed" diff --git a/Python/codegen.c b/Python/codegen.c index c4109fcaa48..bdf894cab8d 100644 --- a/Python/codegen.c +++ b/Python/codegen.c @@ -351,8 +351,8 @@ codegen_addop_o(compiler *c, location loc, #define LOAD_ZERO_SUPER_METHOD -4 static int -codegen_addop_name(compiler *c, location loc, - int opcode, PyObject *dict, PyObject *o) +codegen_addop_name_custom(compiler *c, location loc, + int opcode, PyObject *dict, PyObject *o, int shift, int low) { PyObject *mangled = _PyCompile_MaybeMangle(c, o); if (!mangled) { @@ -363,40 +363,51 @@ codegen_addop_name(compiler *c, location loc, if (arg < 0) { return ERROR; } + ADDOP_I(c, loc, opcode, (arg << shift) | low); + return SUCCESS; +} + +static int +codegen_addop_name(compiler *c, location loc, + int opcode, PyObject *dict, PyObject *o) +{ + int shift = 0, low = 0; if (opcode == LOAD_ATTR) { - arg <<= 1; + shift = 1; } if (opcode == LOAD_METHOD) { opcode = LOAD_ATTR; - arg <<= 1; - arg |= 1; + shift = 1; + low = 1; } if (opcode == LOAD_SUPER_ATTR) { - arg <<= 2; - arg |= 2; + shift = 2; + low = 2; } if (opcode == LOAD_SUPER_METHOD) { opcode = LOAD_SUPER_ATTR; - arg <<= 2; - arg |= 3; + shift = 2; + low = 3; } if (opcode == LOAD_ZERO_SUPER_ATTR) { opcode = LOAD_SUPER_ATTR; - arg <<= 2; + shift = 2; } if (opcode == LOAD_ZERO_SUPER_METHOD) { opcode = LOAD_SUPER_ATTR; - arg <<= 2; - arg |= 1; + shift = 2; + low = 1; } - ADDOP_I(c, loc, opcode, arg); - return SUCCESS; + return codegen_addop_name_custom(c, loc, opcode, dict, o, shift, low); } #define ADDOP_NAME(C, LOC, OP, O, TYPE) \ RETURN_IF_ERROR(codegen_addop_name((C), (LOC), (OP), METADATA(C)->u_ ## TYPE, (O))) -static int +#define ADDOP_NAME_CUSTOM(C, LOC, OP, O, TYPE, SHIFT, LOW) \ + RETURN_IF_ERROR(codegen_addop_name_custom((C), (LOC), (OP), METADATA(C)->u_ ## TYPE, (O), SHIFT, LOW)) + + static int codegen_addop_j(instr_sequence *seq, location loc, int opcode, jump_target_label target) { @@ -2861,7 +2872,13 @@ codegen_import(compiler *c, stmt_ty s) ADDOP_LOAD_CONST(c, loc, zero); ADDOP_LOAD_CONST(c, loc, Py_None); - ADDOP_NAME(c, loc, IMPORT_NAME, alias->name, names); + if (s->v.Import.is_lazy) { + // TODO: SyntaxError when not in module scope + ADDOP_NAME_CUSTOM(c, loc, IMPORT_NAME, alias->name, names, 2, 1); + } else { + // TODO: If in try/except, set 2nd bit + ADDOP_NAME_CUSTOM(c, loc, IMPORT_NAME, alias->name, names, 2, 0); + } if (alias->asname) { r = codegen_import_as(c, loc, alias->name, alias->asname); @@ -2907,12 +2924,15 @@ codegen_from_import(compiler *c, stmt_ty s) ADDOP_LOAD_CONST_NEW(c, LOC(s), names); + identifier from = &_Py_STR(empty); if (s->v.ImportFrom.module) { - ADDOP_NAME(c, LOC(s), IMPORT_NAME, s->v.ImportFrom.module, names); + from = s->v.ImportFrom.module; } - else { - _Py_DECLARE_STR(empty, ""); - ADDOP_NAME(c, LOC(s), IMPORT_NAME, &_Py_STR(empty), names); + if (s->v.ImportFrom.is_lazy) { + // TODO: SyntaxError when not in module scope + ADDOP_NAME_CUSTOM(c, LOC(s), IMPORT_NAME, from, names, 2, 1); + } else { + ADDOP_NAME_CUSTOM(c, LOC(s), IMPORT_NAME, from, names, 2, 0); } for (Py_ssize_t i = 0; i < n; i++) { alias_ty 
alias = (alias_ty)asdl_seq_GET(s->v.ImportFrom.names, i); diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index 0e4d8646376..80204512f25 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -2458,6 +2458,16 @@ if (v_o == NULL) { JUMP_TO_ERROR(); } + if (PyLazyImport_CheckExact(v_o)) { + _PyFrame_SetStackPointer(frame, stack_pointer); + PyObject *l_v = _PyImport_LoadLazyImportTstate(tstate, v_o); + Py_DECREF(v_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + v_o = l_v; + if (v_o == NULL) { + JUMP_TO_ERROR(); + } + } v = PyStackRef_FromPyObjectSteal(v_o); stack_pointer[0] = v; stack_pointer += 1; @@ -2476,6 +2486,21 @@ if (PyStackRef_IsNull(*res)) { JUMP_TO_ERROR(); } + PyObject *res_o = PyStackRef_AsPyObjectBorrow(*res); + if (PyLazyImport_CheckExact(res_o)) { + _PyFrame_SetStackPointer(frame, stack_pointer); + PyObject *l_v = _PyImport_LoadLazyImportTstate(tstate, res_o); + Py_DECREF(res_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + res_o = l_v; + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(res[0]); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + JUMP_TO_ERROR(); + } + *res = PyStackRef_FromPyObjectSteal(res_o); + } stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; @@ -4120,11 +4145,22 @@ oparg = CURRENT_OPARG(); fromlist = stack_pointer[-1]; level = stack_pointer[-2]; - PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); + PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2); + PyObject *res_o; + if (oparg & 0x01) { + _PyFrame_SetStackPointer(frame, stack_pointer); + res_o = _PyEval_LazyImportName(tstate, BUILTINS(), GLOBALS(), name, + PyStackRef_AsPyObjectBorrow(fromlist), + PyStackRef_AsPyObjectBorrow(level)); + stack_pointer = _PyFrame_GetStackPointer(frame); + } else { + _PyFrame_SetStackPointer(frame, stack_pointer); + res_o = _PyEval_ImportName(tstate, BUILTINS(), GLOBALS(), LOCALS(), name, + PyStackRef_AsPyObjectBorrow(fromlist), + PyStackRef_AsPyObjectBorrow(level)); + stack_pointer = _PyFrame_GetStackPointer(frame); + } _PyFrame_SetStackPointer(frame, stack_pointer); - PyObject *res_o = _PyEval_ImportName(tstate, frame, name, - PyStackRef_AsPyObjectBorrow(fromlist), - PyStackRef_AsPyObjectBorrow(level)); _PyStackRef tmp = fromlist; fromlist = PyStackRef_NULL; stack_pointer[-1] = fromlist; @@ -4152,9 +4188,16 @@ oparg = CURRENT_OPARG(); from = stack_pointer[-1]; PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); - _PyFrame_SetStackPointer(frame, stack_pointer); - PyObject *res_o = _PyEval_ImportFrom(tstate, PyStackRef_AsPyObjectBorrow(from), name); - stack_pointer = _PyFrame_GetStackPointer(frame); + PyObject *res_o; + if (PyLazyImport_CheckExact(PyStackRef_AsPyObjectBorrow(from))) { + _PyFrame_SetStackPointer(frame, stack_pointer); + res_o = _PyEval_LazyImportFrom(tstate, PyStackRef_AsPyObjectBorrow(from), name); + stack_pointer = _PyFrame_GetStackPointer(frame); + } else { + _PyFrame_SetStackPointer(frame, stack_pointer); + res_o = _PyEval_ImportFrom(tstate, PyStackRef_AsPyObjectBorrow(from), name); + stack_pointer = _PyFrame_GetStackPointer(frame); + } if (res_o == NULL) { JUMP_TO_ERROR(); } diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 79328a7b725..ccb346cf062 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -6188,9 +6188,16 @@ _PyStackRef res; from = stack_pointer[-1]; PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); - _PyFrame_SetStackPointer(frame, stack_pointer); - PyObject *res_o = 
_PyEval_ImportFrom(tstate, PyStackRef_AsPyObjectBorrow(from), name); - stack_pointer = _PyFrame_GetStackPointer(frame); + PyObject *res_o; + if (PyLazyImport_CheckExact(PyStackRef_AsPyObjectBorrow(from))) { + _PyFrame_SetStackPointer(frame, stack_pointer); + res_o = _PyEval_LazyImportFrom(tstate, PyStackRef_AsPyObjectBorrow(from), name); + stack_pointer = _PyFrame_GetStackPointer(frame); + } else { + _PyFrame_SetStackPointer(frame, stack_pointer); + res_o = _PyEval_ImportFrom(tstate, PyStackRef_AsPyObjectBorrow(from), name); + stack_pointer = _PyFrame_GetStackPointer(frame); + } if (res_o == NULL) { JUMP_TO_LABEL(error); } @@ -6214,11 +6221,22 @@ _PyStackRef res; fromlist = stack_pointer[-1]; level = stack_pointer[-2]; - PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); + PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2); + PyObject *res_o; + if (oparg & 0x01) { + _PyFrame_SetStackPointer(frame, stack_pointer); + res_o = _PyEval_LazyImportName(tstate, BUILTINS(), GLOBALS(), name, + PyStackRef_AsPyObjectBorrow(fromlist), + PyStackRef_AsPyObjectBorrow(level)); + stack_pointer = _PyFrame_GetStackPointer(frame); + } else { + _PyFrame_SetStackPointer(frame, stack_pointer); + res_o = _PyEval_ImportName(tstate, BUILTINS(), GLOBALS(), LOCALS(), name, + PyStackRef_AsPyObjectBorrow(fromlist), + PyStackRef_AsPyObjectBorrow(level)); + stack_pointer = _PyFrame_GetStackPointer(frame); + } _PyFrame_SetStackPointer(frame, stack_pointer); - PyObject *res_o = _PyEval_ImportName(tstate, frame, name, - PyStackRef_AsPyObjectBorrow(fromlist), - PyStackRef_AsPyObjectBorrow(level)); _PyStackRef tmp = fromlist; fromlist = PyStackRef_NULL; stack_pointer[-1] = fromlist; @@ -9074,6 +9092,16 @@ } JUMP_TO_LABEL(error); } + if (PyLazyImport_CheckExact(v_o)) { + _PyFrame_SetStackPointer(frame, stack_pointer); + PyObject *l_v = _PyImport_LoadLazyImportTstate(tstate, v_o); + Py_DECREF(v_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + v_o = l_v; + if (v_o == NULL) { + JUMP_TO_LABEL(error); + } + } } else { _PyFrame_SetStackPointer(frame, stack_pointer); @@ -9098,6 +9126,16 @@ JUMP_TO_LABEL(error); } } + if (PyLazyImport_CheckExact(v_o)) { + _PyFrame_SetStackPointer(frame, stack_pointer); + PyObject *l_v = _PyImport_LoadLazyImportTstate(tstate, v_o); + Py_DECREF(v_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + v_o = l_v; + if (v_o == NULL) { + JUMP_TO_LABEL(error); + } + } } } v = PyStackRef_FromPyObjectSteal(v_o); @@ -9150,6 +9188,21 @@ if (PyStackRef_IsNull(*res)) { JUMP_TO_LABEL(error); } + PyObject *res_o = PyStackRef_AsPyObjectBorrow(*res); + if (PyLazyImport_CheckExact(res_o)) { + _PyFrame_SetStackPointer(frame, stack_pointer); + PyObject *l_v = _PyImport_LoadLazyImportTstate(tstate, res_o); + Py_DECREF(res_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + res_o = l_v; + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(res[0]); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + JUMP_TO_LABEL(error); + } + *res = PyStackRef_FromPyObjectSteal(res_o); + } } // _PUSH_NULL_CONDITIONAL { @@ -9350,6 +9403,16 @@ if (v_o == NULL) { JUMP_TO_LABEL(error); } + if (PyLazyImport_CheckExact(v_o)) { + _PyFrame_SetStackPointer(frame, stack_pointer); + PyObject *l_v = _PyImport_LoadLazyImportTstate(tstate, v_o); + Py_DECREF(v_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + v_o = l_v; + if (v_o == NULL) { + JUMP_TO_LABEL(error); + } + } v = PyStackRef_FromPyObjectSteal(v_o); stack_pointer[0] = v; stack_pointer += 1; diff --git a/Python/import.c 
b/Python/import.c index d01c4d47828..b9b913243f6 100644 --- a/Python/import.c +++ b/Python/import.c @@ -7,6 +7,8 @@ #include "pycore_import.h" // _PyImport_BootstrapImp() #include "pycore_initconfig.h" // _PyStatus_OK() #include "pycore_interp.h" // struct _import_runtime_state +#include "pycore_long.h" // _PyLong_GetZero +#include "pycore_lazyimportobject.h" #include "pycore_magic_number.h" // PYC_MAGIC_NUMBER_TOKEN #include "pycore_moduleobject.h" // _PyModule_GetDef() #include "pycore_namespace.h" // _PyNamespace_Type @@ -3670,6 +3672,101 @@ resolve_name(PyThreadState *tstate, PyObject *name, PyObject *globals, int level return NULL; } +PyObject * +_PyImport_ResolveName(PyThreadState *tstate, PyObject *name, PyObject *globals, int level) +{ + return resolve_name(tstate, name, globals, level); +} + +PyObject * +_PyImport_LoadLazyImportTstate(PyThreadState *tstate, PyObject *lazy_import) +{ + PyObject *obj = NULL; + PyObject *fromlist = NULL; + assert(lazy_import != NULL); + assert(PyLazyImport_CheckExact(lazy_import)); + PyObject *state_dict = _PyThreadState_GetDict(tstate); + assert(state_dict != NULL); + + PyLazyImportObject *lz = (PyLazyImportObject *)lazy_import; + + Py_ssize_t dot = -1; + int full = 0; + if (lz->lz_attr != NULL) { + full = 1; + } + if (!full) { + dot = PyUnicode_FindChar(lz->lz_from, '.', 0, PyUnicode_GET_LENGTH(lz->lz_from), 1); + } + if (dot < 0) { + full = 1; + } + + if (lz->lz_attr != NULL) { + if (PyUnicode_Check(lz->lz_attr)) { + fromlist = PyTuple_New(1); + if (fromlist == NULL) { + goto error; + } + Py_INCREF(lz->lz_attr); + PyTuple_SET_ITEM(fromlist, 0, lz->lz_attr); + } else { + Py_INCREF(lz->lz_attr); + fromlist = lz->lz_attr; + } + } + + PyObject *globals = PyEval_GetGlobals(); + + if (full) { + obj = _PyEval_ImportName(tstate, + lz->lz_builtins, + globals, + globals, + lz->lz_from, + fromlist, + _PyLong_GetZero()); + } else { + PyObject *name = PyUnicode_Substring(lz->lz_from, 0, dot); + if (name == NULL) { + goto error; + } + obj = _PyEval_ImportName(tstate, + lz->lz_builtins, + globals, + globals, + name, + fromlist, + _PyLong_GetZero()); + Py_DECREF(name); + } + + if (obj == NULL) { + goto error; + } + + if (lz->lz_attr != NULL && PyUnicode_Check(lz->lz_attr)) { + PyObject *from = obj; + obj = _PyEval_ImportFrom(tstate, from, lz->lz_attr); + Py_DECREF(from); + if (obj == NULL) { + goto error; + } + } + + assert(!PyLazyImport_CheckExact(obj)); + + goto ok; + +error: + Py_XDECREF(obj); + obj = NULL; + +ok: + Py_XDECREF(fromlist); + return obj; +} + static PyObject * import_find_and_load(PyThreadState *tstate, PyObject *abs_name) {
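
Note on the surface syntax these hunks implement. The sketch below assumes an interpreter built with this patch; the behaviour is inferred from the grammar, codegen, and bytecode hunks above (IMPORT_NAME with the lazy bit builds a lazy-import object via _PyEval_LazyImportName, and the LOAD_NAME/LOAD_GLOBAL paths resolve it through _PyImport_LoadLazyImportTstate on first use). Because "lazy" is a soft keyword, it remains usable as an ordinary identifier elsewhere.

    # Hedged sketch, not a released Python: requires a build with this patch.
    lazy import json                          # lazy bit set on IMPORT_NAME: no module
                                              # load yet, a lazy-import object is bound
    lazy from collections import OrderedDict  # lazy from-import, same deferral

    def encode(obj):
        # The first LOAD_GLOBAL of `json` sees the lazy-import object and calls
        # _PyImport_LoadLazyImportTstate(), which performs the real import.
        return json.dumps(obj)

    print(encode({"a": 1}))                   # the actual import of `json` happens here

    lazy = 2                                  # still legal: "lazy" is only a soft keyword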
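
Note on the AST surface. The Python-ast.c hunks above add an optional is_lazy field to Import and ImportFrom (the grammar actions set it to 1 for the lazy forms and 0 otherwise, and obj2ast_stmt treats a missing or None value as 0). A minimal check on a patched build might look like this; the printed values follow the grammar actions above, not a released ast module.

    import ast

    mod = ast.parse("lazy import json\nfrom os import path\n")
    imp, frm = mod.body
    print(type(imp).__name__, imp.is_lazy)    # Import 1     (lazy form)
    print(type(frm).__name__, frm.is_lazy)    # ImportFrom 0 (plain form)

    # is_lazy is declared as int?, so AST producers that never set it keep working:
    # obj2ast_stmt falls back to 0 when the attribute is absent or None.
    node = ast.Import(names=[ast.alias(name="json")])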
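
Note on the IMPORT_NAME oparg layout. The bytecodes.c and codegen.c hunks shift the co_names index left by two and use bit 0 to select _PyEval_LazyImportName, with the second bit reserved per the "try/except" TODO. The helper below is illustrative only, mirroring that encoding for readers of the hunks; it is not part of the patch or any public API.

    # Illustrative decoder for the oparg layout used above (name index << 2,
    # bit 0 = lazy, bit 1 reserved). Hypothetical helper, not CPython code.
    def decode_import_name_oparg(oparg: int) -> tuple[int, bool]:
        name_index = oparg >> 2           # index into co_names
        is_lazy = bool(oparg & 0x01)      # low bit routes to _PyEval_LazyImportName
        return name_index, is_lazy

    print(decode_import_name_oparg((3 << 2) | 1))   # (3, True)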