Mirror of https://github.com/python/cpython.git
Merged revisions 80936 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/branches/py3k
................
r80936 | benjamin.peterson | 2010-05-07 14:10:11 -0500 (Fri, 07 May 2010) | 76 lines
Merged revisions 80934 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/trunk
................
r80934 | benjamin.peterson | 2010-05-07 13:58:23 -0500 (Fri, 07 May 2010) | 69 lines
Merged revisions 79911,79916-79917,80018,80418,80572-80573,80635-80639,80668,80922 via svnmerge from
svn+ssh://pythondev@svn.python.org/sandbox/trunk/2to3/lib2to3
........
r79911 | benjamin.peterson | 2010-04-09 15:38:53 -0500 (Fri, 09 Apr 2010) | 1 line
use absolute import
........
r79916 | benjamin.peterson | 2010-04-09 16:05:21 -0500 (Fri, 09 Apr 2010) | 1 line
generalize detection of __future__ imports and attach them to the tree
........
r79917 | benjamin.peterson | 2010-04-09 16:11:44 -0500 (Fri, 09 Apr 2010) | 1 line
don't try to 'fix' relative imports when absolute_import is enabled #8858
........
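Taken together, r79916 and r79917 mean the refactoring machinery now knows which __future__ imports a module declares, and the import fixer can skip modules that have already opted in to absolute imports. A minimal sketch of that guard, using the private _detect_future_features() helper this merge introduces (see the diff below); this is an illustration, not the actual fix_import code, and it needs a Python that still ships lib2to3:

from lib2to3 import refactor

plain = "import helper\n"
opted_in = "from __future__ import absolute_import\nimport helper\n"

def import_fixer_should_run(source):
    # Hypothetical guard mirroring #8858: once a module declares
    # absolute_import, a bare "import helper" can no longer be a relative
    # import, so there is nothing for the import fixer to rewrite.
    return "absolute_import" not in refactor._detect_future_features(source)

print(import_fixer_should_run(plain))     # expected: True
print(import_fixer_should_run(opted_in))  # expected: False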
r80018 | benjamin.peterson | 2010-04-12 16:12:12 -0500 (Mon, 12 Apr 2010) | 4 lines
prevent diffs from being mangled in multiprocess mode #6409
Patch by George Boutsioukis.
........
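The mangling happened because several worker processes interleaved their diff output on stdout. The eventual mechanism (the output_lock lines in the MultiprocessRefactoringTool hunks further down) is a multiprocessing.Lock shared by the workers; here is a stand-alone sketch of that idea, not the lib2to3 code itself:

import multiprocessing
import sys

def emit_diff(lines, output_lock):
    # Hold the shared lock for the whole diff so output from concurrently
    # refactored files cannot interleave line by line.
    with output_lock:
        for line in lines:
            sys.stdout.write(line + "\n")
        sys.stdout.flush()

if __name__ == "__main__":
    lock = multiprocessing.Lock()
    workers = [multiprocessing.Process(
                   target=emit_diff,
                   args=(["--- a/f%d.py" % i, "+++ b/f%d.py" % i], lock))
               for i in range(4)]
    for p in workers:
        p.start()
    for p in workers:
        p.join()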
r80418 | benjamin.peterson | 2010-04-23 16:00:03 -0500 (Fri, 23 Apr 2010) | 1 line
remove unhelpful description
........
r80572 | benjamin.peterson | 2010-04-27 20:33:54 -0500 (Tue, 27 Apr 2010) | 1 line
use unicode literals
........
r80573 | jeffrey.yasskin | 2010-04-27 23:08:27 -0500 (Tue, 27 Apr 2010) | 6 lines
Don't transform imports that are already relative. 2to3 turned
from . import refactor
into
from .. import refactor
which broke the transformation of 2to3 itself.
........
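A hypothetical version of the check this change needed, shown only to illustrate the rule; the real fix lives in lib2to3's import fixer:

def is_already_relative(module_name):
    # "from . import refactor" and "from ..pkg import x" both reduce to a
    # dotted name that starts with "."; prepending another dot to such a
    # name is exactly what produced "from .. import refactor" above.
    return module_name.startswith(".")

assert is_already_relative(".")           # from . import refactor
assert is_already_relative("..pkg")       # from ..pkg import x
assert not is_already_relative("helper")  # a bare "import helper" may still need fixing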
r80635 | benjamin.peterson | 2010-04-29 16:02:23 -0500 (Thu, 29 Apr 2010) | 1 line
remove imports
........
r80636 | benjamin.peterson | 2010-04-29 16:02:41 -0500 (Thu, 29 Apr 2010) | 1 line
unicode literal
........
r80637 | benjamin.peterson | 2010-04-29 16:03:42 -0500 (Thu, 29 Apr 2010) | 1 line
must pass a string to Number
........
r80638 | benjamin.peterson | 2010-04-29 16:05:34 -0500 (Thu, 29 Apr 2010) | 1 line
unicode literals
........
r80639 | benjamin.peterson | 2010-04-29 16:06:09 -0500 (Thu, 29 Apr 2010) | 1 line
pass string to Number
........
r80668 | jeffrey.yasskin | 2010-04-30 18:02:47 -0500 (Fri, 30 Apr 2010) | 4 lines
Make 2to3 run under Python 2.5 so that the benchmark suite at
http://hg.python.org/benchmarks/ can use it and still run on implementations
that haven't gotten to 2.6 yet. Fixes issue 8566.
........
r80922 | benjamin.peterson | 2010-05-07 11:06:25 -0500 (Fri, 07 May 2010) | 1 line
prevent xrange transformation from wrapping range calls it produces in list
........
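That is, xrange(n) should simply become range(n); a range(n) that was already in the Python 2 source still needs the list(...) wrapper, but the range call the fixer itself produces must not be wrapped a second time. A quick check through the public RefactoringTool API, on a Python that still ships lib2to3 (the expected outputs are comments, not captured runs):

from lib2to3.refactor import RefactoringTool

rt = RefactoringTool(["lib2to3.fixes.fix_xrange"])
# xrange(...) is rewritten to range(...) and should not be wrapped again.
print(rt.refactor_string("x = xrange(10)\n", "<example>"), end="")  # x = range(10)
# A range(...) already present in the source still gets wrapped.
print(rt.refactor_string("y = range(10)\n", "<example>"), end="")   # y = list(range(10))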
................
................
parent 3a3bba34e2
commit c9e833faa0
13 changed files with 165 additions and 88 deletions
@@ -8,6 +8,8 @@
 provides infrastructure to write your own refactoring tool.
 """
 
+from __future__ import with_statement
+
 __author__ = "Guido van Rossum <guido@python.org>"
 
 
@@ -122,13 +124,14 @@ def _to_system_newlines(input):
     _to_system_newlines = _identity
 
 
-def _detect_future_print(source):
+def _detect_future_features(source):
     have_docstring = False
     gen = tokenize.generate_tokens(io.StringIO(source).readline)
     def advance():
         tok = next(gen)
         return tok[0], tok[1]
     ignore = frozenset((token.NEWLINE, tokenize.NL, token.COMMENT))
+    features = set()
     try:
         while True:
             tp, value = advance()
@@ -140,26 +143,25 @@ def advance():
                 have_docstring = True
             elif tp == token.NAME and value == "from":
                 tp, value = advance()
-                if tp != token.NAME and value != "__future__":
+                if tp != token.NAME or value != "__future__":
                     break
                 tp, value = advance()
-                if tp != token.NAME and value != "import":
+                if tp != token.NAME or value != "import":
                     break
                 tp, value = advance()
                 if tp == token.OP and value == "(":
                     tp, value = advance()
                 while tp == token.NAME:
-                    if value == "print_function":
-                        return True
+                    features.add(value)
                     tp, value = advance()
-                    if tp != token.OP and value != ",":
+                    if tp != token.OP or value != ",":
                         break
                     tp, value = advance()
             else:
                 break
     except StopIteration:
         pass
-    return False
+    return frozenset(features)
 
 
 class FixerError(Exception):
@@ -341,7 +343,8 @@ def refactor_string(self, data, name):
             An AST corresponding to the refactored input stream; None if
             there were errors during the parse.
         """
-        if _detect_future_print(data):
+        features = _detect_future_features(data)
+        if "print_function" in features:
            self.driver.grammar = pygram.python_grammar_no_print_statement
         try:
             tree = self.driver.parse_string(data)
@@ -351,6 +354,7 @@ def refactor_string(self, data, name):
             return
         finally:
             self.driver.grammar = self.grammar
+        tree.future_features = features
         self.log_debug("Refactoring %s", name)
         self.refactor_tree(tree, name)
         return tree
@@ -605,6 +609,7 @@ class MultiprocessRefactoringTool(RefactoringTool):
     def __init__(self, *args, **kwargs):
         super(MultiprocessRefactoringTool, self).__init__(*args, **kwargs)
         self.queue = None
+        self.output_lock = None
 
     def refactor(self, items, write=False, doctests_only=False,
                  num_processes=1):
@@ -618,6 +623,7 @@ def refactor(self, items, write=False, doctests_only=False,
         if self.queue is not None:
             raise RuntimeError("already doing multiple processes")
         self.queue = multiprocessing.JoinableQueue()
+        self.output_lock = multiprocessing.Lock()
         processes = [multiprocessing.Process(target=self._child)
                      for i in range(num_processes)]
         try:
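The hunks above are from lib2to3's refactor.py: _detect_future_print(), which only reported whether print_function was in effect, becomes _detect_future_features(), which returns a frozenset of every __future__ name it finds, and refactor_string() now stores that set on the parsed tree as tree.future_features while still switching to the no-print-statement grammar when print_function is present. A small usage sketch of the resulting behaviour, again assuming a Python that still ships lib2to3:

from lib2to3.refactor import RefactoringTool

src = (
    "from __future__ import print_function, unicode_literals\n"
    "print('hi', end='')\n"  # parses only under the no-print-statement grammar
)

rt = RefactoringTool([])  # no fixers are needed just to parse
tree = rt.refactor_string(src, "<example>")
print(sorted(tree.future_features))  # expected: ['print_function', 'unicode_literals']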