gh-140576: Fixed crash produced by lexer in case of dedented zero byte (#140583)

Author: Mikhail Efimov, 2025-10-29 16:27:35 +03:00 (committed by GitHub)
Parent: 6ff62ac4fb
Commit: 8706167474
3 changed files with 6 additions and 0 deletions


@@ -3183,6 +3183,7 @@ def get_tokens(string):
             f'__{
                 x:d
             }__'""",
+            " a\n\x00",
         ]:
             with self.subTest(case=case):
                 self.assertRaises(tokenize.TokenError, get_tokens, case)
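For context, here is a minimal stand-alone reproducer matching the new test case (the hunk above is presumably from Lib/test/test_tokenize.py, judging by the get_tokens helper): an indented line followed by a dedented NUL byte used to crash the tokenizer, and with this fix it raises tokenize.TokenError instead. This is an illustrative sketch, not part of the patch.

import io
import tokenize

# The input added to the regression test: an indented line, then a
# dedented NUL byte on the following line.
src = " a\n\x00"

try:
    # Consuming the generator drives the C lexer over the bad input.
    list(tokenize.generate_tokens(io.StringIO(src).readline))
except tokenize.TokenError as exc:
    print("TokenError raised as expected:", exc)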


@@ -0,0 +1,2 @@
+Fixed crash in :func:`tokenize.generate_tokens` in case of
+specific incorrect input. Patch by Mikhail Efimov.


@@ -539,6 +539,9 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
                     return MAKE_TOKEN(ERRORTOKEN);
                 }
             }
+            else if (c == EOF && PyErr_Occurred()) {
+                return MAKE_TOKEN(ERRORTOKEN);
+            }
             else {
                 break;
             }
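The added branch covers the case where the lexer has read EOF while a Python exception is already pending (presumably the error raised when the dedented NUL byte is rejected during input decoding): instead of falling through to the final break and letting tokenization continue with an exception set, it now returns an ERRORTOKEN so the error propagates to the caller as tokenize.TokenError. A small sketch of the user-visible difference between the two front ends follows; the compile() behaviour shown is longstanding and not part of this change, and the exact messages may vary.

import io
import tokenize

src = " a\n\x00"

# compile() rejects NUL bytes in a str source before the parser runs,
# so this path was never affected by the crash.
try:
    compile(src, "<test>", "exec")
except ValueError as exc:
    print("compile():", exc)

# The readline-based tokenizer is what feeds the NUL byte into the lexer's
# indentation handling; with this fix it reports TokenError instead of crashing.
try:
    list(tokenize.generate_tokens(io.StringIO(src).readline))
except tokenize.TokenError as exc:
    print("generate_tokens():", exc)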