mirror of
https://github.com/python/cpython.git
synced 2025-11-02 06:31:29 +00:00
New, fixed version with proper r"..." and R"..." support from Ka-Ping.
parent 3b631775b2
commit fefc922cef

1 changed file with 10 additions and 7 deletions
Lib/tokenize.py

@@ -47,7 +47,7 @@ def maybe(*choices): return apply(group, choices) + '?'
 Double = any(r'[^"\\]', r'\\.') + '"'
 Single3 = any(r"[^'\\]",r'\\.',r"'[^'\\]",r"'\\.",r"''[^'\\]",r"''\\.") + "'''"
 Double3 = any(r'[^"\\]',r'\\.',r'"[^"\\]',r'"\\.',r'""[^"\\]',r'""\\.') + '"""'
-Triple = group("'''", '"""', "r'''", 'r"""')
+Triple = group("[rR]?'''", '[rR]?"""')
 String = group("[rR]?'" + any(r"[^\n'\\]", r'\\.') + "'",
                '[rR]?"' + any(r'[^\n"\\]', r'\\.') + '"')

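The hunk above replaces the explicit lowercase-only raw-string alternatives in Triple with an optional [rR] prefix, so R-prefixed openers are recognized as well. A minimal sketch of the effect, with the group/any/maybe helpers adapted from the surrounding file (they are not part of this diff, and the apply()-based originals are rewritten in modern Python so the snippet runs standalone):

    import re

    # Regex-building helpers, as defined near the top of tokenize.py
    # (modernized here; 'any' deliberately shadows the builtin, as in the module).
    def group(*choices): return '(' + '|'.join(choices) + ')'
    def any(*choices): return group(*choices) + '*'
    def maybe(*choices): return group(*choices) + '?'

    OldTriple = group("'''", '"""', "r'''", 'r"""')   # before this commit
    NewTriple = group("[rR]?'''", '[rR]?"""')         # after this commit

    for opener in ("'''", 'r"""', "R'''"):
        print(opener, bool(re.match(OldTriple, opener)),
              bool(re.match(NewTriple, opener)))
    # R''' matches only the new pattern; the other openers match both.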
@@ -60,16 +60,17 @@ def maybe(*choices): return apply(group, choices) + '?'
 PlainToken = group(Number, Funny, String, Name)
 Token = Ignore + PlainToken

-ContStr = group("r?'" + any(r'\\.', r"[^\n'\\]") + group("'", r'\\\r?\n'),
-                'r?"' + any(r'\\.', r'[^\n"\\]') + group('"', r'\\\r?\n'))
+ContStr = group("[rR]?'" + any(r'\\.', r"[^\n'\\]") + group("'", r'\\\r?\n'),
+                '[rR]?"' + any(r'\\.', r'[^\n"\\]') + group('"', r'\\\r?\n'))
 PseudoExtras = group(r'\\\r?\n', Comment, Triple)
 PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)

 tokenprog, pseudoprog, single3prog, double3prog = map(
     re.compile, (Token, PseudoToken, Single3, Double3))
-endprogs = {"'": re.compile(Single), '"': re.compile(Double), 'r': None,
+endprogs = {"'": re.compile(Single), '"': re.compile(Double),
             "'''": single3prog, '"""': double3prog,
-            "r'''": single3prog, 'r"""': double3prog}
+            "r'''": single3prog, 'r"""': double3prog,
+            "R'''": single3prog, 'R"""': double3prog, 'r': None, 'R': None}

 tabsize = 8
 TokenError = 'TokenError'
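The second hunk widens ContStr to accept an [rR]? prefix and, in endprogs, maps both 'r' and 'R' to None so the existing fall-through lookup still resolves raw one-line strings by their quote character. A minimal sketch of that dictionary dispatch, assuming the Single/Double patterns and helpers from the surrounding file:

    import re

    def group(*choices): return '(' + '|'.join(choices) + ')'
    def any(*choices): return group(*choices) + '*'

    Single = any(r"[^'\\]", r'\\.') + "'"     # rest of a one-line '...' string
    Double = any(r'[^"\\]', r'\\.') + '"'     # rest of a one-line "..." string

    endprogs = {"'": re.compile(Single), '"': re.compile(Double),
                'r': None, 'R': None}

    token, initial = "R'abc'", 'R'
    # endprogs['R'] is None, so the lookup falls through to the quote character,
    # exactly as in the line "endprog = endprogs[initial] or endprogs[token[1]]":
    endprog = endprogs[initial] or endprogs[token[1]]
    print(endprog.match(token, 2).group())    # -> abc'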
@@ -148,7 +149,8 @@ def tokenize(readline, tokeneater=printtoken):
                     tokeneater(NEWLINE, token, spos, epos, line)
                 elif initial == '#':
                     tokeneater(COMMENT, token, spos, epos, line)
-                elif token in ("'''",'"""',"r'''",'r"""'): # triple-quoted
+                elif token in ("'''", '"""',               # triple-quoted
+                               "r'''", 'r"""', "R'''", 'R"""'):
                     endprog = endprogs[token]
                     endmatch = endprog.match(line, pos)
                     if endmatch:                           # all on one line
@@ -159,7 +161,8 @@ def tokenize(readline, tokeneater=printtoken):
                         strstart = (lnum, start)           # multiple lines
                         contstr = line[start:]
                         break
-                elif initial in ("'", '"') or token[:2] in ("r'", 'r"'):
+                elif initial in ("'", '"') or \
+                     token[:2] in ("r'", 'r"', "R'", 'R"'):
                     if token[-1] == '\n':                  # continued string
                         strstart = (lnum, start)
                         endprog = endprogs[initial] or endprogs[token[1]]
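Taken together, the tokenizer now treats the r and R prefixes uniformly for one-line, continued, and triple-quoted strings. A quick end-to-end check with today's tokenize module (the modern generator API, not the 1997 tokeneater callback used in this file) shows both prefixes coming back as single STRING tokens:

    import io
    import tokenize

    src = (
        "a = r'\\d+'\n"
        "b = R'''raw\n"
        "triple'''\n"
    )
    for tok in tokenize.generate_tokens(io.StringIO(src).readline):
        if tok.type == tokenize.STRING:
            print(tok.string)
    # -> r'\d+'
    # -> R'''raw
    #    triple'''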