Mirror of https://github.com/python/cpython.git (synced 2025-12-08 06:10:17 +00:00)
[3.14] gh-140576: Fixed crash produced by lexer in case of dedented zero byte (GH-140583) (#140757)
gh-140576: Fixed crash produced by lexer in case of dedented zero byte (GH-140583)
(cherry picked from commit 8706167474)
Co-authored-by: Mikhail Efimov <efimov.mikhail@gmail.com>
parent e0f54a0bda
commit c45ab05a3a
3 changed files with 6 additions and 0 deletions
@@ -3183,6 +3183,7 @@ def get_tokens(string):
             f'__{
                 x:d
             }__'""",
+            " a\n\x00",
         ]:
             with self.subTest(case=case):
                 self.assertRaises(tokenize.TokenError, get_tokens, case)
@@ -0,0 +1,2 @@
+Fixed crash in :func:`tokenize.generate_tokens` in case of
+specific incorrect input. Patch by Mikhail Efimov.
@@ -539,6 +539,9 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
                     return MAKE_TOKEN(ERRORTOKEN);
                 }
             }
+            else if (c == EOF && PyErr_Occurred()) {
+                return MAKE_TOKEN(ERRORTOKEN);
+            }
             else {
                 break;
             }
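
For context, a minimal reproduction sketch of the user-visible behaviour described by the NEWS entry above. Routing the input through the public tokenize.generate_tokens API, rather than the internal helper the regression test uses, is an assumption; the input string " a\n\x00" (an indented line followed by a NUL byte at column zero, the "dedented zero byte" in the title) and the expectation that it raises tokenize.TokenError instead of crashing are taken from the new test case.

    # A minimal sketch, not part of the change set: feed the crashing input
    # from the regression test through the public tokenize API.
    import io
    import tokenize

    # Indented first line followed by a NUL byte at column zero.
    source = " a\n\x00"

    try:
        # generate_tokens() is lazy; list() drives the C tokenizer over the
        # whole (malformed) input.
        list(tokenize.generate_tokens(io.StringIO(source).readline))
    except tokenize.TokenError as exc:
        # Mirrors assertRaises(tokenize.TokenError, ...) in the test above.
        print("tokenization failed cleanly:", exc)

The lexer change itself follows the usual CPython C API convention that a sentinel such as EOF is ambiguous: it can mean genuine end of input or a failure that has already set a Python exception. The new branch checks PyErr_Occurred() and returns ERRORTOKEN in the latter case, so the error propagates to the caller instead of the tokenizer continuing as if it had merely reached the end of the input.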