Add future 0.18.2

This commit is contained in:
JonnyWong16 2020-03-23 18:45:35 -07:00
parent 08c8ee0774
commit fa97d3f88d
210 changed files with 43159 additions and 0 deletions

View file

@ -0,0 +1 @@
# empty to make this a package

View file

@ -0,0 +1,54 @@
import sys
from lib2to3 import refactor
# The original set of these fixes comes from lib3to2 (https://bitbucket.org/amentajo/lib3to2):
# Fixer modules (dotted paths) that lib2to3's refactoring engine should load.
# Commented-out entries are either already supported on Py2.6/2.7 or unfinished.
fix_names = {
    'libpasteurize.fixes.fix_add_all__future__imports',  # from __future__ import absolute_import etc. on separate lines
    'libpasteurize.fixes.fix_add_future_standard_library_import',  # we force adding this import for now, even if it doesn't seem necessary to the fix_future_standard_library fixer, for ease of testing
    # 'libfuturize.fixes.fix_order___future__imports',  # consolidates to a single line to simplify testing -- UNFINISHED
    'libpasteurize.fixes.fix_future_builtins',  # adds "from future.builtins import *"
    'libfuturize.fixes.fix_future_standard_library',  # adds "from future import standard_library"
    'libpasteurize.fixes.fix_annotations',
    # 'libpasteurize.fixes.fix_bitlength',  # ints have this in Py2.7
    # 'libpasteurize.fixes.fix_bool',  # need a decorator or Mixin
    # 'libpasteurize.fixes.fix_bytes',  # leave bytes as bytes
    # 'libpasteurize.fixes.fix_classdecorator',  # available in Py2.6+
    # 'libpasteurize.fixes.fix_collections',  # hmmm ...
    # 'libpasteurize.fixes.fix_dctsetcomp',  # avail in Py27
    'libpasteurize.fixes.fix_division',  # yes
    # 'libpasteurize.fixes.fix_except',  # avail in Py2.6+
    # 'libpasteurize.fixes.fix_features',  # ?
    'libpasteurize.fixes.fix_fullargspec',
    # 'libpasteurize.fixes.fix_funcattrs',
    'libpasteurize.fixes.fix_getcwd',
    'libpasteurize.fixes.fix_imports',  # adds "from future import standard_library"
    'libpasteurize.fixes.fix_imports2',
    # 'libpasteurize.fixes.fix_input',
    # 'libpasteurize.fixes.fix_int',
    # 'libpasteurize.fixes.fix_intern',
    # 'libpasteurize.fixes.fix_itertools',
    'libpasteurize.fixes.fix_kwargs',  # yes, we want this
    # 'libpasteurize.fixes.fix_memoryview',
    # 'libpasteurize.fixes.fix_metaclass',  # write a custom handler for this
    # 'libpasteurize.fixes.fix_methodattrs',  # __func__ and __self__ seem to be defined on Py2.7 already
    'libpasteurize.fixes.fix_newstyle',  # yes, we want this: explicit inheritance from object. Without new-style classes in Py2, super() will break etc.
    # 'libpasteurize.fixes.fix_next',  # use a decorator for this
    # 'libpasteurize.fixes.fix_numliterals',  # prob not
    # 'libpasteurize.fixes.fix_open',  # huh?
    # 'libpasteurize.fixes.fix_print',  # no way
    'libpasteurize.fixes.fix_printfunction',  # adds __future__ import print_function
    # 'libpasteurize.fixes.fix_raise_',  # TODO: get this working!
    # 'libpasteurize.fixes.fix_range',  # nope
    # 'libpasteurize.fixes.fix_reduce',
    # 'libpasteurize.fixes.fix_setliteral',
    # 'libpasteurize.fixes.fix_str',
    # 'libpasteurize.fixes.fix_super',  # maybe, if our magic super() isn't robust enough
    'libpasteurize.fixes.fix_throw',  # yes, if Py3 supports it
    # 'libpasteurize.fixes.fix_unittest',
    'libpasteurize.fixes.fix_unpacking',  # yes, this is useful
    # 'libpasteurize.fixes.fix_with'  # way out of date
}

View file

@ -0,0 +1,57 @@
u"""
Base classes for features that are backwards-incompatible.
Usage:
features = Features()
features.add(Feature("py3k_feature", "power< 'py3k' any* >", "2.7"))
PATTERN = features.PATTERN
"""
# Template used by Features.PATTERN below to emit "name=pattern" alternatives
# for lib2to3's pattern compiler.
pattern_unformatted = u"%s=%s" # name=pattern, for dict lookups
# Warning template filled with (feature name, minimum Python version).
message_unformatted = u"""
%s is only supported in Python %s and above."""
class Feature(object):
    u"""
    A single backwards-incompatible feature.

    Bundles a feature name, the lib2to3 pattern that recognises it, and the
    lowest Python 2.x release that supports it (or a 3.x version when no
    Python 2 equivalent exists).
    """
    def __init__(self, name, PATTERN, version):
        self.version = version
        self.name = name
        self._pattern = PATTERN  # read by Features.PATTERN when joining patterns
    def message_text(self):
        u"""
        Return the warning text for this feature, naming it and the minimum
        Python version it requires.
        """
        return message_unformatted % (self.name, self.version)
class Features(set):
    u"""
    A set of Feature instances that generates a lib2to3 pattern covering
    every feature it contains.

    Also acts like a mapping from feature names to Feature objects; call
    update_mapping() (or read PATTERN) after mutating the set so that the
    mapping stays current.
    """
    # Class-level default; update_mapping() installs a per-instance dict.
    mapping = {}
    def update_mapping(self):
        u"""
        Rebuild the name -> Feature mapping from the set's current contents.
        Called every time we care about the mapping of names to features.
        """
        # Generator form avoids building an intermediate list of tuples.
        self.mapping = dict((f.name, f) for f in self)
    @property
    def PATTERN(self):
        u"""
        Return a combined "name=pattern" alternation suitable for the
        lib2to3 pattern compiler (patcomp), refreshing the mapping first.
        """
        self.update_mapping()
        return u" |\n".join([pattern_unformatted % (f.name, f._pattern) for f in self])
    def __getitem__(self, key):
        u"""
        Look up a Feature by name.

        Raises KeyError for unknown names; only as fresh as the last
        update_mapping() call.
        """
        return self.mapping[key]

View file

@ -0,0 +1,24 @@
"""
Fixer for adding:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
This is done when converting from Py3 to both Py3/Py2.
"""
from lib2to3 import fixer_base
from libfuturize.fixer_util import future_import
class FixAddAllFutureImports(fixer_base.BaseFix):
    u"""Insert the full set of ``from __future__ import ...`` lines into a module."""
    BM_compatible = True
    PATTERN = "file_input"
    run_order = 1
    def transform(self, node, results):
        # Same call order as listing the four imports explicitly.
        for feature in (u"absolute_import", u"division",
                        u"print_function", u"unicode_literals"):
            future_import(feature, node)

View file

@ -0,0 +1,37 @@
"""
For the ``future`` package.
Adds this import line::
from builtins import (ascii, bytes, chr, dict, filter, hex, input,
int, list, map, next, object, oct, open, pow,
range, round, str, super, zip)
to a module, irrespective of whether each definition is used.
Adds these imports after any other imports (in an initial block of them).
"""
from __future__ import unicode_literals
from lib2to3 import fixer_base
from libfuturize.fixer_util import touch_import_top
class FixAddAllFutureBuiltins(fixer_base.BaseFix):
    u"""
    Add ``from builtins import *`` to a module, irrespective of whether each
    builtin is actually used, placing it after any other initial imports.
    """
    BM_compatible = True
    PATTERN = "file_input"
    run_order = 1
    def transform(self, node, results):
        # A star import keeps the header short; the commented-out per-name
        # variant that used to live here has been removed as dead code.
        touch_import_top(u'builtins', '*', node)

View file

@ -0,0 +1,23 @@
"""
For the ``future`` package.
Adds this import line:
from future import standard_library
after any __future__ imports but before any other imports. Doesn't actually
change the imports to Py3 style.
"""
from lib2to3 import fixer_base
from libfuturize.fixer_util import touch_import_top
class FixAddFutureStandardLibraryImport(fixer_base.BaseFix):
    u"""
    Add ``from future import standard_library`` to the module.

    Only the import is added; the Py2-style imports themselves are not
    rewritten by this fixer.
    """
    BM_compatible = True
    PATTERN = "file_input"
    # NOTE(review): run_order 8 places this after the run_order-1 import
    # fixers in this package -- confirm intended ordering against libfuturize.
    run_order = 8
    def transform(self, node, results):
        # TODO: add a blank line between any __future__ imports and this?
        touch_import_top(u'future', u'standard_library', node)
        # TODO: also add standard_library.install_hooks()

View file

@ -0,0 +1,48 @@
u"""
Fixer to remove function annotations
"""
from lib2to3 import fixer_base
from lib2to3.pgen2 import token
from lib2to3.fixer_util import syms
# Emitted (at most once per fixer) whenever an annotation is stripped.
warning_text = u"Removing function annotations completely."
def param_without_annotations(node):
    u"""Return the first child of an annotated parameter node (the bare name)."""
    children = node.children
    return children[0]
class FixAnnotations(fixer_base.BaseFix):
    u"""
    Strip Python 3 function annotations (parameter and return annotations)
    so the definition is valid Python 2 syntax.  Warns once that the
    annotations are being removed completely.
    """
    # Becomes True after the first warning (assignment below creates an
    # instance attribute shadowing this class default).
    warned = False
    def warn_once(self, node, reason):
        # Emit the warning only the first time; later calls are no-ops.
        if not self.warned:
            self.warned = True
            self.warning(node, reason=reason)
    # Match any funcdef, capturing the parameter list as 'params' and the
    # optional '-> annotation' clause's annotation as 'ret'.
    PATTERN = u"""
              funcdef< 'def' any parameters< '(' [params=any] ')' > ['->' ret=any] ':' any* >
              """
    def transform(self, node, results):
        u"""
        This just strips annotations from the funcdef completely.
        """
        params = results.get(u"params")
        ret = results.get(u"ret")
        if ret is not None:
            # Remove both the '->' token and the annotation expression.
            assert ret.prev_sibling.type == token.RARROW, u"Invalid return annotation"
            self.warn_once(node, reason=warning_text)
            ret.prev_sibling.remove()
            ret.remove()
        if params is None: return
        if params.type == syms.typedargslist:
            # more than one param in a typedargslist
            for param in params.children:
                if param.type == syms.tname:
                    # tname = annotated parameter; replace it with its bare NAME.
                    self.warn_once(node, reason=warning_text)
                    param.replace(param_without_annotations(param))
        elif params.type == syms.tname:
            # one param
            self.warn_once(node, reason=warning_text)
            params.replace(param_without_annotations(params))

View file

@ -0,0 +1,28 @@
u"""
Fixer for division: from __future__ import division if needed
"""
from lib2to3 import fixer_base
from libfuturize.fixer_util import token, future_import
def match_division(node):
    u"""
    Match a lone '/' used as the division operator.

    __future__.division only redefines the single slash, so a slash that is
    adjacent to another slash (i.e. part of '//' floor division) is rejected.
    """
    slash = token.SLASH
    if node.type != slash:
        return False
    return node.next_sibling.type != slash and node.prev_sibling.type != slash
class FixDivision(fixer_base.BaseFix):
    u"""Add ``from __future__ import division`` when a true-division '/' is seen."""
    run_order = 4 # this seems to be ignored?
    def match(self, node):
        u"""
        Since the tree needs to be fixed once and only once if and only if it
        matches, then we can start discarding matches after we make the first.
        """
        # Delegates to match_division(): a bare '/' not adjacent to another
        # '/' (which would be floor division '//').
        return match_division(node)
    def transform(self, node, results):
        # The slash itself is left untouched; only the import is added.
        future_import(u"division", node)

View file

@ -0,0 +1,86 @@
u"""
Warn about features that are not present in Python 2.5, giving a message that
points to the earliest version of Python 2.x (or 3.x, if none) that supports it
"""
from .feature_base import Feature, Features
from lib2to3 import fixer_base
FEATURES = [
#(FeatureName,
# FeaturePattern,
# FeatureMinVersion,
#),
(u"memoryview",
u"power < 'memoryview' trailer < '(' any* ')' > any* >",
u"2.7",
),
(u"numbers",
u"""import_from< 'from' 'numbers' 'import' any* > |
import_name< 'import' ('numbers' dotted_as_names< any* 'numbers' any* >) >""",
u"2.6",
),
(u"abc",
u"""import_name< 'import' ('abc' dotted_as_names< any* 'abc' any* >) > |
import_from< 'from' 'abc' 'import' any* >""",
u"2.6",
),
(u"io",
u"""import_name< 'import' ('io' dotted_as_names< any* 'io' any* >) > |
import_from< 'from' 'io' 'import' any* >""",
u"2.6",
),
(u"bin",
u"power< 'bin' trailer< '(' any* ')' > any* >",
u"2.6",
),
(u"formatting",
u"power< any trailer< '.' 'format' > trailer< '(' any* ')' > >",
u"2.6",
),
(u"nonlocal",
u"global_stmt< 'nonlocal' any* >",
u"3.0",
),
(u"with_traceback",
u"trailer< '.' 'with_traceback' >",
u"3.0",
),
]
class FixFeatures(fixer_base.BaseFix):
    u"""
    Warn (or mark as unconvertible) when the tree uses a feature listed in
    FEATURES, naming the earliest Python version that supports it.
    """
    run_order = 9 # Wait until all other fixers have run to check for these
    # To avoid spamming, we only want to warn for each feature once.
    # NOTE: deliberately a class-level set -- .add() below mutates it in
    # place, so the "warned" state is shared.
    features_warned = set()
    # Build features from the list above
    features = Features([Feature(name, pattern, version) for \
                              name, pattern, version in FEATURES])
    PATTERN = features.PATTERN
    def match(self, node):
        to_ret = super(FixFeatures, self).match(node)
        # We want the mapping only to tell us the node's specific information.
        try:
            del to_ret[u'node']
        except Exception:
            # We want it to delete the 'node' from the results
            # if it's there, so we don't care if it fails for normal reasons.
            pass
        return to_ret
    def transform(self, node, results):
        for feature_name in results:
            if feature_name in self.features_warned:
                continue
            else:
                curr_feature = self.features[feature_name]
                # Lexicographic compare works for these version strings
                # ("2.6" < "2.7" < "3"): >= u"3" means Py3-only.
                if curr_feature.version >= u"3":
                    fail = self.cannot_convert
                else:
                    fail = self.warning
                fail(node, reason=curr_feature.message_text())
                self.features_warned.add(feature_name)

View file

@ -0,0 +1,16 @@
u"""
Fixer for getfullargspec -> getargspec
"""
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name
# Warning attached to every rewritten occurrence.
warn_msg = u"some of the values returned by getfullargspec are not valid in Python 2 and have no equivalent."
class FixFullargspec(fixer_base.BaseFix):
    u"""Rename ``getfullargspec`` to the Python 2 ``getargspec``, with a warning."""
    # Matches the bare name wherever it appears (a single NAME leaf), not
    # only as an attribute of the inspect module.
    PATTERN = u"'getfullargspec'"
    def transform(self, node, results):
        self.warning(node, warn_msg)
        # Replace the leaf, preserving the original leading whitespace.
        return Name(u"getargspec", prefix=node.prefix)

View file

@ -0,0 +1,46 @@
"""
Adds this import line:
from builtins import XYZ
for each of the functions XYZ that is used in the module.
"""
from __future__ import unicode_literals
from lib2to3 import fixer_base
from lib2to3.pygram import python_symbols as syms
from lib2to3.fixer_util import Name, Call, in_special_context
from libfuturize.fixer_util import touch_import_top
# All builtins are:
# from future.builtins.iterators import (filter, map, zip)
# from future.builtins.misc import (ascii, chr, hex, input, isinstance, oct, open, round, super)
# from future.types import (bytes, dict, int, range, str)
# We don't need isinstance any more.
# Builtins whose call sites trigger "from builtins import <name>".
# (isinstance is intentionally absent -- it is no longer needed.)
replaced_builtins = '''filter map zip
                    ascii chr hex input next oct open round super
                    bytes dict int range str'''.split()
# One "name='xyz'" alternative per builtin, joined into a single pattern
# fragment for FixFutureBuiltins.PATTERN below.
_alternatives = ["name='{0}'".format(name) for name in replaced_builtins]
expression = '|'.join(_alternatives)
class FixFutureBuiltins(fixer_base.BaseFix):
    u"""
    For each replaced builtin that the module calls, add
    ``from builtins import <name>`` at the top of the module.
    """
    BM_compatible = True
    run_order = 9
    # Currently we only match uses as a function. This doesn't match e.g.:
    # if isinstance(s, str):
    # ...
    PATTERN = """
              power<
                 ({0}) trailer< '(' args=[any] ')' >
              rest=any* >
              """.format(expression)
    def transform(self, node, results):
        name = results["name"]
        # Only the import is added; the call site itself is left unchanged.
        touch_import_top(u'builtins', name.value, node)
        # name.replace(Name(u"input", prefix=name.prefix))

View file

@ -0,0 +1,26 @@
u"""
Fixer for os.getcwd() -> os.getcwdu().
Also warns about "from os import getcwd", suggesting the above form.
"""
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name
class FixGetcwd(fixer_base.BaseFix):
    u"""
    Rewrite ``os.getcwd()`` to ``os.getcwdu()``.

    ``from os import getcwd`` is flagged as unconvertible instead, with a
    suggestion to use the attribute form.
    """
    PATTERN = u"""
              power< 'os' trailer< dot='.' name='getcwd' > any* >
              |
              import_from< 'from' 'os' 'import' bad='getcwd' >
              """
    def transform(self, node, results):
        name = results.get(u"name")
        if name is not None:
            # Attribute use: swap in the new leaf, keeping the whitespace.
            name.replace(Name(u"getcwdu", prefix=name.prefix))
            return
        if u"bad" in results:
            # Can't convert to getcwdu and then expect to catch every use.
            self.cannot_convert(node, u"import os, use os.getcwd() instead.")
            return
        # The pattern guarantees one of the two captures above.
        raise ValueError(u"For some reason, the pattern matcher failed.")

View file

@ -0,0 +1,112 @@
u"""
Fixer for standard library imports renamed in Python 3
"""
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, is_probably_builtin, Newline, does_tree_import
from lib2to3.pygram import python_symbols as syms
from lib2to3.pgen2 import token
from lib2to3.pytree import Node, Leaf
from libfuturize.fixer_util import touch_import_top
# from ..fixer_util import NameImport
# used in simple_mapping_to_pattern()
# Py3 module name -> Py2 module name.  Keys are the (Py3-style) names this
# fixer encounters in source; values are their Python 2 equivalents.
MAPPING = {u"reprlib": u"repr",
           u"winreg": u"_winreg",
           u"configparser": u"ConfigParser",
           u"copyreg": u"copy_reg",
           u"queue": u"Queue",
           u"socketserver": u"SocketServer",
           u"_markupbase": u"markupbase",
           u"test.support": u"test.test_support",
           u"dbm.bsd": u"dbhash",
           u"dbm.ndbm": u"dbm",
           u"dbm.dumb": u"dumbdbm",
           u"dbm.gnu": u"gdbm",
           u"html.parser": u"HTMLParser",
           u"html.entities": u"htmlentitydefs",
           u"http.client": u"httplib",
           u"http.cookies": u"Cookie",
           u"http.cookiejar": u"cookielib",
           # "tkinter": "Tkinter",
           u"tkinter.dialog": u"Dialog",
           u"tkinter._fix": u"FixTk",
           u"tkinter.scrolledtext": u"ScrolledText",
           u"tkinter.tix": u"Tix",
           u"tkinter.constants": u"Tkconstants",
           u"tkinter.dnd": u"Tkdnd",
           u"tkinter.__init__": u"Tkinter",
           u"tkinter.colorchooser": u"tkColorChooser",
           u"tkinter.commondialog": u"tkCommonDialog",
           u"tkinter.font": u"tkFont",
           u"tkinter.ttk": u"ttk",
           u"tkinter.messagebox": u"tkMessageBox",
           u"tkinter.turtle": u"turtle",
           u"urllib.robotparser": u"robotparser",
           u"xmlrpc.client": u"xmlrpclib",
           u"builtins": u"__builtin__",
          }
# generic strings to help build patterns
# these variables mean (with http.client.HTTPConnection as an example):
# name = http
# attr = client
# used = HTTPConnection
# fmt_name is a formatted subpattern (simple_name_match or dotted_name_match)
# helps match 'queue', as in 'from queue import ...'
simple_name_match = u"name='%s'"
# helps match 'client', to be used if client has been imported from http
subname_match = u"attr='%s'"
# helps match 'http.client', as in 'import urllib.request'
dotted_name_match = u"dotted_name=dotted_name< %s '.' %s >"
# helps match 'queue', as in 'queue.Queue(...)'
power_onename_match = u"%s"
# helps match 'http.client', as in 'http.client.HTTPConnection(...)'
power_twoname_match = u"power< %s trailer< '.' %s > any* >"
# helps match 'client.HTTPConnection', if 'client' has been imported from http
power_subname_match = u"power< %s any* >"
# helps match 'from http.client import HTTPConnection'
from_import_match = u"from_import=import_from< 'from' %s 'import' imported=any >"
# helps match 'from http import client'
from_import_submod_match = u"from_import_submod=import_from< 'from' %s 'import' (%s | import_as_name< %s 'as' renamed=any > | import_as_names< any* (%s | import_as_name< %s 'as' renamed=any >) any* > ) >"
# helps match 'import urllib.request'
name_import_match = u"name_import=import_name< 'import' %s > | name_import=import_name< 'import' dotted_as_name< %s 'as' renamed=any > >"
# helps match 'import http.client, winreg'
multiple_name_import_match = u"name_import=import_name< 'import' dotted_as_names< names=any* > >"
def all_patterns(name):
    u"""
    Return one alternation covering every import/usage form of *name*.

    Called by FixImports below once per key of MAPPING.  Dotted names
    ('http.client') get the two-part patterns; bare names ('queue') the
    one-part ones.
    """
    if u'.' in name:
        name, attr = name.split(u'.', 1)
        head = simple_name_match % (name)
        tail = subname_match % (attr)
        dotted = dotted_name_match % (head, tail)
        alternatives = (
            name_import_match % (dotted, dotted),                        # import http.client [as x]
            from_import_match % (dotted),                                # from http.client import ...
            from_import_submod_match % (head, tail, tail, tail, tail),   # from http import client [as x]
            power_twoname_match % (head, tail),                          # http.client.X(...)
            power_subname_match % (tail),                                # client.X(...)
        )
    else:
        head = simple_name_match % (name)
        alternatives = (
            name_import_match % (head, head),                            # import queue [as x]
            from_import_match % (head),                                  # from queue import ...
            power_onename_match % (head),                                # queue.Queue(...)
        )
    return u' | \n'.join(alternatives)
class FixImports(fixer_base.BaseFix):
    u"""
    Detect imports/uses of stdlib modules renamed in Python 3 and add
    ``from future import standard_library``.  The import/use itself is not
    rewritten here.
    """
    # One huge alternation: every import/usage form of every MAPPING key,
    # plus multi-module 'import a, b' statements.
    PATTERN = u' | \n'.join([all_patterns(name) for name in MAPPING])
    PATTERN = u' | \n'.join((PATTERN, multiple_name_import_match))
    def transform(self, node, results):
        touch_import_top(u'future', u'standard_library', node)

View file

@ -0,0 +1,174 @@
u"""
Fixer for complicated imports
"""
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, String, FromImport, Newline, Comma
from libfuturize.fixer_util import touch_import_top
# Names re-exported by several Tk-related Py2 modules (star-import surface
# shared with Tkinter); merged into the PY2MODULES entries below.
TK_BASE_NAMES = (u'ACTIVE', u'ALL', u'ANCHOR', u'ARC',u'BASELINE', u'BEVEL', u'BOTH',
                 u'BOTTOM', u'BROWSE', u'BUTT', u'CASCADE', u'CENTER', u'CHAR',
                 u'CHECKBUTTON', u'CHORD', u'COMMAND', u'CURRENT', u'DISABLED',
                 u'DOTBOX', u'E', u'END', u'EW', u'EXCEPTION', u'EXTENDED', u'FALSE',
                 u'FIRST', u'FLAT', u'GROOVE', u'HIDDEN', u'HORIZONTAL', u'INSERT',
                 u'INSIDE', u'LAST', u'LEFT', u'MITER', u'MOVETO', u'MULTIPLE', u'N',
                 u'NE', u'NO', u'NONE', u'NORMAL', u'NS', u'NSEW', u'NUMERIC', u'NW',
                 u'OFF', u'ON', u'OUTSIDE', u'PAGES', u'PIESLICE', u'PROJECTING',
                 u'RADIOBUTTON', u'RAISED', u'READABLE', u'RIDGE', u'RIGHT',
                 u'ROUND', u'S', u'SCROLL', u'SE', u'SEL', u'SEL_FIRST', u'SEL_LAST',
                 u'SEPARATOR', u'SINGLE', u'SOLID', u'SUNKEN', u'SW', u'StringTypes',
                 u'TOP', u'TRUE', u'TclVersion', u'TkVersion', u'UNDERLINE',
                 u'UNITS', u'VERTICAL', u'W', u'WORD', u'WRITABLE', u'X', u'Y', u'YES',
                 u'wantobjects')
# Python 2 module -> the public names it provides.  Passed to
# build_import_pattern() below (its docstring: "mapping py2k modules to the
# things they do").
PY2MODULES = {
              u'urllib2' : (
                  u'AbstractBasicAuthHandler', u'AbstractDigestAuthHandler',
                  u'AbstractHTTPHandler', u'BaseHandler', u'CacheFTPHandler',
                  u'FTPHandler', u'FileHandler', u'HTTPBasicAuthHandler',
                  u'HTTPCookieProcessor', u'HTTPDefaultErrorHandler',
                  u'HTTPDigestAuthHandler', u'HTTPError', u'HTTPErrorProcessor',
                  u'HTTPHandler', u'HTTPPasswordMgr',
                  u'HTTPPasswordMgrWithDefaultRealm', u'HTTPRedirectHandler',
                  u'HTTPSHandler', u'OpenerDirector', u'ProxyBasicAuthHandler',
                  u'ProxyDigestAuthHandler', u'ProxyHandler', u'Request',
                  u'StringIO', u'URLError', u'UnknownHandler', u'addinfourl',
                  u'build_opener', u'install_opener', u'parse_http_list',
                  u'parse_keqv_list', u'randombytes', u'request_host', u'urlopen'),
              u'urllib' : (
                  u'ContentTooShortError', u'FancyURLopener',u'URLopener',
                  u'basejoin', u'ftperrors', u'getproxies',
                  u'getproxies_environment', u'localhost', u'pathname2url',
                  u'quote', u'quote_plus', u'splitattr', u'splithost',
                  u'splitnport', u'splitpasswd', u'splitport', u'splitquery',
                  u'splittag', u'splittype', u'splituser', u'splitvalue',
                  u'thishost', u'unquote', u'unquote_plus', u'unwrap',
                  u'url2pathname', u'urlcleanup', u'urlencode', u'urlopen',
                  u'urlretrieve',),
              u'urlparse' : (
                  u'parse_qs', u'parse_qsl', u'urldefrag', u'urljoin',
                  u'urlparse', u'urlsplit', u'urlunparse', u'urlunsplit'),
              u'dbm' : (
                  u'ndbm', u'gnu', u'dumb'),
              u'anydbm' : (
                  u'error', u'open'),
              u'whichdb' : (
                  u'whichdb',),
              u'BaseHTTPServer' : (
                  u'BaseHTTPRequestHandler', u'HTTPServer'),
              u'CGIHTTPServer' : (
                  u'CGIHTTPRequestHandler',),
              u'SimpleHTTPServer' : (
                  u'SimpleHTTPRequestHandler',),
              u'FileDialog' : TK_BASE_NAMES + (
                  u'FileDialog', u'LoadFileDialog', u'SaveFileDialog',
                  u'dialogstates', u'test'),
              u'tkFileDialog' : (
                  u'Directory', u'Open', u'SaveAs', u'_Dialog', u'askdirectory',
                  u'askopenfile', u'askopenfilename', u'askopenfilenames',
                  u'askopenfiles', u'asksaveasfile', u'asksaveasfilename'),
              u'SimpleDialog' : TK_BASE_NAMES + (
                  u'SimpleDialog',),
              u'tkSimpleDialog' : TK_BASE_NAMES + (
                  u'askfloat', u'askinteger', u'askstring', u'Dialog'),
              u'SimpleXMLRPCServer' : (
                  u'CGIXMLRPCRequestHandler', u'SimpleXMLRPCDispatcher',
                  u'SimpleXMLRPCRequestHandler', u'SimpleXMLRPCServer',
                  u'list_public_methods', u'remove_duplicates',
                  u'resolve_dotted_attribute'),
              u'DocXMLRPCServer' : (
                  u'DocCGIXMLRPCRequestHandler', u'DocXMLRPCRequestHandler',
                  u'DocXMLRPCServer', u'ServerHTMLDoc',u'XMLRPCDocGenerator'),
              }
# Py3 module -> tuple of Py2 modules whose contents it absorbed.
MAPPING = { u'urllib.request' :
                (u'urllib2', u'urllib'),
            u'urllib.error' :
                (u'urllib2', u'urllib'),
            u'urllib.parse' :
                (u'urllib2', u'urllib', u'urlparse'),
            u'dbm.__init__' :
                (u'anydbm', u'whichdb'),
            u'http.server' :
                (u'CGIHTTPServer', u'SimpleHTTPServer', u'BaseHTTPServer'),
            u'tkinter.filedialog' :
                (u'tkFileDialog', u'FileDialog'),
            u'tkinter.simpledialog' :
                (u'tkSimpleDialog', u'SimpleDialog'),
            u'xmlrpc.server' :
                (u'DocXMLRPCServer', u'SimpleXMLRPCServer'),
            }
# helps match 'http', as in 'from http.server import ...'
# lib2to3 pattern templates, filled in by the pattern-building functions below.
# helps match 'http', as in 'from http.server import ...'
simple_name = u"name='%s'"
# helps match 'server', as in 'from http.server import ...'
simple_attr = u"attr='%s'"
# helps match 'HTTPServer', as in 'from http.server import HTTPServer'
simple_using = u"using='%s'"
# helps match 'urllib.request', as in 'import urllib.request'
dotted_name = u"dotted_name=dotted_name< %s '.' %s >"
# helps match 'http.server', as in 'http.server.HTTPServer(...)'
power_twoname = u"pow=power< %s trailer< '.' %s > trailer< '.' using=any > any* >"
# helps match 'dbm.whichdb', as in 'dbm.whichdb(...)'
power_onename = u"pow=power< %s trailer< '.' using=any > any* >"
# helps match 'from http.server import HTTPServer'
# also helps match 'from http.server import HTTPServer, SimpleHTTPRequestHandler'
# also helps match 'from http.server import *'
from_import = u"from_import=import_from< 'from' %s 'import' (import_as_name< using=any 'as' renamed=any> | in_list=import_as_names< using=any* > | using='*' | using=NAME) >"
# helps match 'import urllib.request'
name_import = u"name_import=import_name< 'import' (%s | in_list=dotted_as_names< imp_list=any* >) >"
#############
# WON'T FIX #
#############
# helps match 'import urllib.request as name'
name_import_rename = u"name_import_rename=dotted_as_name< %s 'as' renamed=any >"
# helps match 'from http import server'
from_import_rename = u"from_import_rename=import_from< 'from' %s 'import' (%s | import_as_name< %s 'as' renamed=any > | in_list=import_as_names< any* (%s | import_as_name< %s 'as' renamed=any >) any* >) >"
def all_modules_subpattern():
    u"""
    Build one parenthesised alternation matching every dotted module name in
    MAPPING, plus the bare package name for each '<pkg>.__init__' entry.
    """
    split_mods = [mod.split(u".") for mod in MAPPING]
    dotted_alts = [dotted_name % (simple_name % (parts[0]),
                                  simple_attr % (parts[1]))
                   for parts in split_mods]
    bare_alts = [simple_name % (parts[0])
                 for parts in split_mods if parts[1] == u"__init__"]
    return (u"( " + u" | ".join(dotted_alts) +
            u" | " + u" | ".join(bare_alts) + u" )")
def build_import_pattern(mapping1, mapping2):
    u"""
    Yield lib2to3 pattern alternatives matching every way the Py3 modules in
    *mapping1* can be imported or used.

    mapping1: A dict mapping py3k modules to all possible py2k replacements.
    mapping2: A dict mapping py2k modules to the things they do (currently
              unused here; accepted for interface compatibility).
    """
    # py3k: urllib.request, py2k: ('urllib2', 'urllib')
    yield from_import % (all_modules_subpattern())
    # Only the keys are needed, so don't unpack .items().
    for py3k in mapping1:
        name, attr = py3k.split(u'.')
        s_name = simple_name % (name)
        s_attr = simple_attr % (attr)
        d_name = dotted_name % (s_name, s_attr)
        yield name_import % (d_name)
        yield power_twoname % (s_name, s_attr)
        if attr == u'__init__':
            # 'dbm.__init__' is also importable/usable as plain 'dbm'.
            yield name_import % (s_name)
            yield power_onename % (s_name)
        yield name_import_rename % (d_name)
        yield from_import_rename % (s_name, s_attr, s_attr, s_attr, s_attr)
class FixImports2(fixer_base.BaseFix):
    u"""
    Detect imports/uses of the 'complicated' renamed modules (MAPPING above)
    and add ``from future import standard_library``; nothing else is rewritten.
    """
    run_order = 4
    PATTERN = u" | \n".join(build_import_pattern(MAPPING, PY2MODULES))
    def transform(self, node, results):
        touch_import_top(u'future', u'standard_library', node)

View file

@ -0,0 +1,147 @@
u"""
Fixer for Python 3 function parameter syntax
This fixer is rather sensitive to incorrect py3k syntax.
"""
# Note: "relevant" parameters are parameters following the first STAR in the list.
from lib2to3 import fixer_base
from lib2to3.fixer_util import token, String, Newline, Comma, Name
from libfuturize.fixer_util import indentation, suitify, DoubleStar
# Statement inserted into the body: name = kwargs['name']; del kwargs['name']
_assign_template = u"%(name)s = %(kwargs)s['%(name)s']; del %(kwargs)s['%(name)s']"
# Guard wrapped around the assignment when the keyword has a default value.
_if_template = u"if '%(name)s' in %(kwargs)s: %(assign)s"
_else_template = u"else: %(name)s = %(default)s"
# Synthetic **kwargs name used when the function doesn't declare its own.
_kwargs_default_name = u"_3to2kwargs"
def gen_params(raw_params):
    u"""
    Yield (name, default_value) for each keyword-only parameter in the list.

    default_value is None when no default is given (the plain object None,
    not a Leaf(token.NAME, 'None')).  Iteration stops at a ** parameter.
    """
    assert raw_params[0].type == token.STAR and len(raw_params) > 2
    idx = 2  # the first possible position of a keyword-only parameter name
    limit = len(raw_params)
    while idx < limit:
        item = raw_params[idx]
        if item.type != token.NAME:
            idx += 1
            continue
        before = item.prev_sibling
        if before is not None and before.type == token.DOUBLESTAR:
            # Reached the **kwargs name: nothing keyword-only beyond it.
            break
        after = item.next_sibling
        if after is not None and after.type == token.EQUAL:
            # 'name = default': yield the default node, skip '=' and default.
            yield (item.value, after.next_sibling)
            idx += 3
        else:
            yield (item.value, None)
            idx += 1
def remove_params(raw_params, kwargs_default=_kwargs_default_name):
    u"""
    Removes all keyword-only args from the params list and a bare star, if any.
    Does not add the kwargs dict if needed.
    Returns True if more action is needed, False if not
    (more action is needed if no kwargs dict exists)
    """
    # (kwargs_default is accepted for symmetry with needs_fixing() but unused.)
    assert raw_params[0].type == token.STAR
    if raw_params[1].type == token.COMMA:
        # Bare '*,' separator: remove the star and comma themselves too.
        raw_params[0].remove()
        raw_params[1].remove()
        kw_params = raw_params[2:]
    else:
        # '*args,' form: keep '*args' and its comma in the signature.
        kw_params = raw_params[3:]
    for param in kw_params:
        if param.type != token.DOUBLESTAR:
            param.remove()
        else:
            # Hit '**': an explicit kwargs dict already exists.
            return False
    else:
        # for-else: loop exhausted without '**', caller must add a kwargs dict.
        return True
def needs_fixing(raw_params, kwargs_default=_kwargs_default_name):
    u"""
    Returns string with the name of the kwargs dict if the params after the first star need fixing
    Otherwise returns empty string
    """
    found_kwargs = False
    needs_fix = False
    for t in raw_params[2:]:
        if t.type == token.COMMA:
            # Commas are irrelevant at this stage.
            continue
        elif t.type == token.NAME and not found_kwargs:
            # Keyword-only argument: definitely need to fix.
            needs_fix = True
        elif t.type == token.NAME and found_kwargs:
            # Return 'foobar' of **foobar, if needed.
            return t.value if needs_fix else u''
        elif t.type == token.DOUBLESTAR:
            # Found either '*' from **foobar.
            found_kwargs = True
        else:
            # Never found **foobar. Return a synthetic name, if needed.
            return kwargs_default if needs_fix else u''
    # NOTE(review): if the loop exhausts without taking the final else branch
    # (e.g. the params end with a bare keyword-only NAME), this falls through
    # and returns None rather than u''.  Callers only test truthiness, so a
    # trailing kw-only arg with no default currently goes unfixed -- confirm
    # whether that is intended.
class FixKwargs(fixer_base.BaseFix):
    u"""
    Convert Python 3 keyword-only parameters to Python 2 compatible code:
    removes them from the signature and re-extracts each one from a **kwargs
    dict at the top of the function body.
    """
    run_order = 7 # Run after function annotations are removed
    PATTERN = u"funcdef< 'def' NAME parameters< '(' arglist=typedargslist< params=any* > ')' > ':' suite=any >"
    def transform(self, node, results):
        params_rawlist = results[u"params"]
        # Find the first '*': everything from there on is keyword-only territory.
        for i, item in enumerate(params_rawlist):
            if item.type == token.STAR:
                params_rawlist = params_rawlist[i:]
                break
        else:
            # No star at all: nothing keyword-only to fix.
            return
        # params is guaranteed to be a list starting with *.
        # if fixing is needed, there will be at least 3 items in this list:
        # [STAR, COMMA, NAME] is the minimum that we need to worry about.
        new_kwargs = needs_fixing(params_rawlist)
        # new_kwargs is the name of the kwargs dictionary.
        if not new_kwargs:
            return
        suitify(node)
        # At this point, params_rawlist is guaranteed to be a list
        # beginning with a star that includes at least one keyword-only param
        # e.g., [STAR, NAME, COMMA, NAME, COMMA, DOUBLESTAR, NAME] or
        # [STAR, COMMA, NAME], or [STAR, COMMA, NAME, COMMA, DOUBLESTAR, NAME]
        # Anatomy of a funcdef: ['def', 'name', parameters, ':', suite]
        # Anatomy of that suite: [NEWLINE, INDENT, first_stmt, all_other_stmts]
        # We need to insert our new stuff before the first_stmt and change the
        # first_stmt's prefix.
        suite = node.children[4]
        first_stmt = suite.children[2]
        ident = indentation(first_stmt)
        # insert_child(2, ...) each time, so each pair/quad of statements is
        # spliced in ahead of the previous ones, preserving source order.
        for name, default_value in gen_params(params_rawlist):
            if default_value is None:
                suite.insert_child(2, Newline())
                suite.insert_child(2, String(_assign_template %{u'name':name, u'kwargs':new_kwargs}, prefix=ident))
            else:
                suite.insert_child(2, Newline())
                suite.insert_child(2, String(_else_template %{u'name':name, u'default':default_value}, prefix=ident))
                suite.insert_child(2, Newline())
                suite.insert_child(2, String(_if_template %{u'assign':_assign_template %{u'name':name, u'kwargs':new_kwargs}, u'name':name, u'kwargs':new_kwargs}, prefix=ident))
        first_stmt.prefix = ident
        suite.children[2].prefix = u""
        # Now, we need to fix up the list of params.
        must_add_kwargs = remove_params(params_rawlist)
        if must_add_kwargs:
            # No '**' param existed: append ', **<new_kwargs>' to the arglist.
            arglist = results[u'arglist']
            if len(arglist.children) > 0 and arglist.children[-1].type != token.COMMA:
                arglist.append_child(Comma())
            arglist.append_child(DoubleStar(prefix=u" "))
            arglist.append_child(Name(new_kwargs))

View file

@ -0,0 +1,21 @@
u"""
Fixer for memoryview(s) -> buffer(s).
Explicit because some memoryview methods are invalid on buffer objects.
"""
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name
class FixMemoryview(fixer_base.BaseFix):
    u"""
    Replace ``memoryview(...)`` calls with ``buffer(...)``.

    Opt-in (explicit) because some memoryview methods are invalid on buffer
    objects.
    """
    explicit = True # User must specify that they want this.
    PATTERN = u"""
              power< name='memoryview' trailer< '(' [any] ')' >
              rest=any* >
              """
    def transform(self, node, results):
        target = results[u"name"]
        # Swap just the name leaf, keeping the original leading whitespace.
        target.replace(Name(u"buffer", prefix=target.prefix))

View file

@ -0,0 +1,78 @@
u"""
Fixer for (metaclass=X) -> __metaclass__ = X
Some semantics (see PEP 3115) may be altered in the translation."""
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, syms, Node, Leaf, Newline, find_root
from lib2to3.pygram import token
from libfuturize.fixer_util import indentation, suitify
# from ..fixer_util import Name, syms, Node, Leaf, Newline, find_root, indentation, suitify
def has_metaclass(parent):
    u"""
    Scan a classdef node's children for a ``metaclass=X`` keyword argument.

    Returns the nodes making up the metaclass argument (last element is the
    metaclass value) so the caller can remove them, or None if absent.
    """
    results = None
    for node in parent.children:
        kids = node.children
        if node.type == syms.argument:
            # Sole-argument case: 'class X(metaclass=M)'.
            if kids[0] == Leaf(token.NAME, u"metaclass") and \
               kids[1] == Leaf(token.EQUAL, u"=") and \
               kids[2]:
                #Hack to avoid "class X(=):" with this case.
                results = [node] + kids
                break
        elif node.type == syms.arglist:
            # Argument list... loop through it looking for:
            # Node(*, [*, Leaf(token.NAME, u"metaclass"), Leaf(token.EQUAL, u"="), Leaf(*, *)]
            for child in node.children:
                if results: break
                if child.type == token.COMMA:
                    #Store the last comma, which precedes the metaclass
                    comma = child
                elif type(child) == Node:
                    meta = equal = name = None
                    for arg in child.children:
                        if arg == Leaf(token.NAME, u"metaclass"):
                            #We have the (metaclass) part
                            meta = arg
                        elif meta and arg == Leaf(token.EQUAL, u"="):
                            #We have the (metaclass=) part
                            equal = arg
                        elif meta and equal:
                            #Here we go, we have (metaclass=X)
                            # NOTE(review): 'comma' is only bound if a COMMA
                            # was seen earlier -- this assumes the metaclass
                            # argument never comes first in an arglist; confirm
                            # the grammar guarantees that.
                            name = arg
                            results = (comma, meta, equal, name)
                            break
    return results
class FixMetaclass(fixer_base.BaseFix):
    u"""Rewrite a PEP 3115 ``class X(..., metaclass=Y):`` declaration into
    the Python 2 form: remove the keyword argument and insert
    ``__metaclass__ = Y`` as the first statement of the class body.

    Per the module docstring, some semantics (see PEP 3115) may be altered
    by this translation.
    """

    PATTERN = u"""
    classdef<any*>
    """

    def transform(self, node, results):
        meta_results = has_metaclass(node)
        if not meta_results: return
        # Detach every node that made up the "metaclass=Y" argument.
        for meta in meta_results:
            meta.remove()
        target = Leaf(token.NAME, u"__metaclass__")
        equal = Leaf(token.EQUAL, u"=", prefix=u" ")
        # meta is the last item in what was returned by has_metaclass(): name
        name = meta
        name.prefix = u" "
        # Build the "__metaclass__ = Y" statement to be spliced in below.
        stmt_node = Node(syms.atom, [target, equal, name])

        # Ensure the class body is an indented suite, then place the new
        # statement immediately after the suite's first INDENT token.
        suitify(node)
        for item in node.children:
            if item.type == syms.suite:
                for stmt in item.children:
                    if stmt.type == token.INDENT:
                        # Insert, in reverse order, the statement, a newline,
                        # and an indent right after the first indented line
                        loc = item.children.index(stmt) + 1
                        # Keep consistent indentation form
                        ident = Leaf(token.INDENT, stmt.value)
                        item.insert_child(loc, ident)
                        item.insert_child(loc, Newline())
                        item.insert_child(loc, stmt_node)
                        break

View file

@ -0,0 +1,33 @@
u"""
Fixer for "class Foo: ..." -> "class Foo(object): ..."
"""
from lib2to3 import fixer_base
from lib2to3.fixer_util import LParen, RParen, Name
from libfuturize.fixer_util import touch_import_top
def insert_object(node, idx):
    u"""Splice the three leaves ``(``, ``object``, ``)`` into *node* so that
    its children read ``(object)`` starting at position *idx*."""
    # Inserting repeatedly at the same fixed index in reverse order leaves
    # the leaves in the correct "(object)" sequence.
    for leaf in (RParen(), Name(u"object"), LParen()):
        node.insert_child(idx, leaf)
class FixNewstyle(fixer_base.BaseFix):
    u"""Turn ``class Foo:`` or ``class Foo():`` into ``class Foo(object):``
    and make sure ``object`` is imported from ``builtins`` at the top of
    the module.
    """

    # Matches a classdef with either no parentheses at all, or an empty
    # pair, before the colon.
    PATTERN = u"classdef< 'class' NAME ['(' ')'] colon=':' any >"

    def transform(self, node, results):
        insert_at = node.children.index(results[u"colon"])
        preceding = [kid.value for kid in node.children[insert_at - 2:insert_at]]
        if preceding == ['(', ')']:
            # Drop the empty parens: "(object)" replaces them wholesale.
            del node.children[insert_at - 2:insert_at]
            insert_at -= 2
        insert_object(node, insert_at)
        touch_import_top(u'builtins', 'object', node)

View file

@ -0,0 +1,43 @@
u"""
Fixer for:
it.__next__() -> it.next().
next(it) -> it.next().
"""
from lib2to3.pgen2 import token
from lib2to3.pygram import python_symbols as syms
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, Call, find_binding, Attr
bind_warning = u"Calls to builtin next() possibly shadowed by global binding"
class FixNext(fixer_base.BaseFix):
    u"""Rewrite Python 3 ``next`` usage into the Python 2 method protocol:

        it.__next__()   ->  it.next()   (attribute branch)
        next(it)        ->  it.next()   (builtin-call branch)

    and rename a ``def __next__(self)`` method inside a class definition
    to ``def next(self)``.
    """

    PATTERN = u"""
    power< base=any+ trailer< '.' attr='__next__' > any* >
    |
    power< head='next' trailer< '(' arg=any ')' > any* >
    |
    classdef< 'class' base=any+ ':'
              suite< any*
                     funcdef< 'def'
                              attr='__next__'
                              parameters< '(' NAME ')' > any+ >
                     any* > >
    """

    def transform(self, node, results):
        assert results
        base = results.get(u"base")
        attr = results.get(u"attr")
        head = results.get(u"head")
        arg_ = results.get(u"arg")
        if arg_:
            # Bug fix: the original called ``unicode(arg)``, a builtin that
            # does not exist on Python 3, so running pasteurize itself under
            # Python 3 crashed with NameError on any ``next(it)`` call.
            # Fall back to str() there; keep unicode() on Python 2 to
            # preserve its behaviour for non-ASCII node text.
            try:
                text_type = unicode  # Python 2
            except NameError:
                text_type = str  # Python 3
            # Rebuild "next(it)" as "it.next()": the old head ('next') is
            # replaced by the argument plus a ".next" trailer, and the
            # argument is removed from the now-empty call parentheses.
            arg = arg_.clone()
            head.replace(Attr(Name(text_type(arg), prefix=head.prefix),
                         Name(u"next")))
            arg_.remove()
        elif base:
            # Attribute or method-definition branch: just rename __next__.
            attr.replace(Name(u"next", prefix=attr.prefix))

View file

@ -0,0 +1,17 @@
u"""
Fixer for print: from __future__ import print_function.
"""
from lib2to3 import fixer_base
from libfuturize.fixer_util import future_import
class FixPrintfunction(fixer_base.BaseFix):
    u"""On seeing a ``print(...)`` call, add
    ``from __future__ import print_function`` to the module.

    The print call itself is not modified here -- only the import is
    added, via the shared ``future_import`` helper from
    ``libfuturize.fixer_util``.
    """

    # explicit = True

    PATTERN = u"""
              power< 'print' trailer < '(' any* ')' > any* >
              """

    def transform(self, node, results):
        future_import(u"print_function", node)

View file

@ -0,0 +1,25 @@
u"""Fixer for 'raise E(V).with_traceback(T)' -> 'raise E, V, T'"""
from lib2to3 import fixer_base
from lib2to3.fixer_util import Comma, Node, Leaf, token, syms
class FixRaise(fixer_base.BaseFix):
    u"""Convert ``raise E(V).with_traceback(T)`` to ``raise E, V, T``.

    Statements without a ``.with_traceback(...)`` call can match the
    pattern but are left untouched (``trc`` is None).  A ``raise ... from
    X`` chain is stripped with a warning, since Python 2 has no
    equivalent syntax.
    """

    PATTERN = u"""
    raise_stmt< 'raise' (power< name=any [trailer< '(' val=any* ')' >]
        [trailer< '.' 'with_traceback' > trailer< '(' trc=any ')' >] > | any) ['from' chain=any] >"""

    def transform(self, node, results):
        name, val, trc = (results.get(u"name"), results.get(u"val"), results.get(u"trc"))
        chain = results.get(u"chain")
        if chain is not None:
            self.warning(node, u"explicit exception chaining is not supported in Python 2")
            # Remove the 'from' keyword (the chain's previous sibling) as
            # well as the chained expression itself.
            chain.prev_sibling.remove()
            chain.remove()
        if trc is not None:
            # val is a (possibly empty) list of call-argument nodes; raise
            # with None as the value when E was called with no arguments.
            val = val[0] if val else Leaf(token.NAME, u"None")
            val.prefix = trc.prefix = u" "
            # Rebuild the whole statement as the three-argument Py2 form.
            kids = [Leaf(token.NAME, u"raise"), name.clone(), Comma(),
                    val.clone(), Comma(), trc.clone()]
            raise_stmt = Node(syms.raise_stmt, kids)
            node.replace(raise_stmt)

View file

@ -0,0 +1,35 @@
u"""Fixer for
raise E(V).with_traceback(T)
to:
from future.utils import raise_
...
raise_(E, V, T)
TODO: FIXME!!
"""
from lib2to3 import fixer_base
from lib2to3.fixer_util import Comma, Node, Leaf, token, syms
class FixRaise(fixer_base.BaseFix):
    u"""Unfinished alternative raise fixer (see the module docstring:
    "TODO: FIXME!!").  Intended to rewrite
    ``raise E(V).with_traceback(T)`` into a ``raise_(E, V, T)`` call using
    ``future.utils.raise_``, but the body below is still the three-argument
    ``raise E, V, T`` rewrite copied from fix_raise.
    """

    PATTERN = u"""
    raise_stmt< 'raise' (power< name=any [trailer< '(' val=any* ')' >]
        [trailer< '.' 'with_traceback' > trailer< '(' trc=any ')' >] > | any) ['from' chain=any] >"""

    def transform(self, node, results):
        # Deliberate guard: this bare name raises NameError if the fixer is
        # ever run, marking the implementation as unfinished (see module
        # docstring "TODO: FIXME!!").
        FIXME
        name, val, trc = (results.get(u"name"), results.get(u"val"), results.get(u"trc"))
        chain = results.get(u"chain")
        if chain is not None:
            self.warning(node, u"explicit exception chaining is not supported in Python 2")
            # Strip both the 'from' keyword and the chained expression.
            chain.prev_sibling.remove()
            chain.remove()
        if trc is not None:
            # Default the exception value to None when E took no arguments.
            val = val[0] if val else Leaf(token.NAME, u"None")
            val.prefix = trc.prefix = u" "
            kids = [Leaf(token.NAME, u"raise"), name.clone(), Comma(),
                    val.clone(), Comma(), trc.clone()]
            raise_stmt = Node(syms.raise_stmt, kids)
            node.replace(raise_stmt)

View file

@ -0,0 +1,23 @@
u"""Fixer for 'g.throw(E(V).with_traceback(T))' -> 'g.throw(E, V, T)'"""
from lib2to3 import fixer_base
from lib2to3.pytree import Node, Leaf
from lib2to3.pgen2 import token
from lib2to3.fixer_util import Comma
class FixThrow(fixer_base.BaseFix):
    u"""Convert ``g.throw(E(V).with_traceback(T))`` to ``g.throw(E, V, T)``.

    The call's argument node is rebuilt in place as the three-argument
    Python 2 form.
    """

    PATTERN = u"""
    power< any trailer< '.' 'throw' >
           trailer< '(' args=power< exc=any trailer< '(' val=any* ')' >
           trailer< '.' 'with_traceback' > trailer< '(' trc=any ')' > > ')' > >
    """

    def transform(self, node, results):
        # (Removed the original's dead local ``syms = self.syms``, which was
        # never used and shadowed the module-level import.)
        exc, val, trc = (results[u"exc"], results[u"val"], results[u"trc"])
        # val is a (possibly empty) list of call arguments; default to None.
        val = val[0] if val else Leaf(token.NAME, u"None")
        val.prefix = trc.prefix = u" "
        kids = [exc.clone(), Comma(), val.clone(), Comma(), trc.clone()]
        args = results[u"args"]
        args.children = kids
        # Bug fix: assigning to .children directly bypasses pytree's
        # bookkeeping (unlike replace()/insert_child()), so without this the
        # tree was never flagged as modified and a file where only this
        # fixer fired would not be reported as changed.
        args.changed()

View file

@ -0,0 +1,120 @@
u"""
Fixer for:
(a,)* *b (,c)* [,] = s
for (a,)* *b (,c)* [,] in d: ...
"""
from lib2to3 import fixer_base
from itertools import count
from lib2to3.fixer_util import (Assign, Comma, Call, Newline, Name,
Number, token, syms, Node, Leaf)
from libfuturize.fixer_util import indentation, suitify, commatize
# from libfuturize.fixer_util import Assign, Comma, Call, Newline, Name, Number, indentation, suitify, commatize, token, syms, Node, Leaf
def assignment_source(num_pre, num_post, LISTNAME, ITERNAME):
    u"""
    Accepts num_pre and num_post, which are counts of values
    before and after the starg (not including the starg)
    Returns a source fit for Assign() from fixer_util

    The produced expression has the shape
        LISTNAME[:pre] + [LISTNAME[pre:-post]] + LISTNAME[-post:]
    with the first/last terms omitted when the respective count is zero.
    """
    children = []
    # Bug fix: the original used ``unicode(num_pre)`` / ``unicode(num_post)``,
    # which raises NameError when pasteurize runs under Python 3.  str() of
    # an int is ASCII-only, so it is safe and equivalent on both 2 and 3.
    pre = str(num_pre)
    post = str(num_post)
    # This code builds the assignment source from lib2to3 tree primitives.
    # It's not very readable, but it seems like the most correct way to do it.
    if num_pre > 0:
        # LISTNAME[:pre]
        pre_part = Node(syms.power, [Name(LISTNAME), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Leaf(token.COLON, u":"), Number(pre)]), Leaf(token.RSQB, u"]")])])
        children.append(pre_part)
        children.append(Leaf(token.PLUS, u"+", prefix=u" "))
    # [LISTNAME[pre:-post]] -- either bound degenerates to an empty leaf
    # (i.e. an open slice end) when its count is zero.
    main_part = Node(syms.power, [Leaf(token.LSQB, u"[", prefix=u" "), Name(LISTNAME), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Number(pre) if num_pre > 0 else Leaf(1, u""), Leaf(token.COLON, u":"), Node(syms.factor, [Leaf(token.MINUS, u"-"), Number(post)]) if num_post > 0 else Leaf(1, u"")]), Leaf(token.RSQB, u"]"), Leaf(token.RSQB, u"]")])])
    children.append(main_part)
    if num_post > 0:
        children.append(Leaf(token.PLUS, u"+", prefix=u" "))
        # LISTNAME[-post:]
        post_part = Node(syms.power, [Name(LISTNAME, prefix=u" "), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Node(syms.factor, [Leaf(token.MINUS, u"-"), Number(post)]), Leaf(token.COLON, u":")]), Leaf(token.RSQB, u"]")])])
        children.append(post_part)
    source = Node(syms.arith_expr, children)
    return source
class FixUnpacking(fixer_base.BaseFix):
    u"""Rewrite extended iterable unpacking (PEP 3132), e.g.
    ``a, *b, c = s`` and ``for a, *b, c in it:``, as Python 2 compatible
    list slicing.  See transform() for before/after examples.
    """

    PATTERN = u"""
        expl=expr_stmt< testlist_star_expr<
            pre=(any ',')*
                star_expr< '*' name=NAME >
            post=(',' any)* [','] > '=' source=any > |
        impl=for_stmt< 'for' lst=exprlist<
            pre=(any ',')*
                star_expr< '*' name=NAME >
            post=(',' any)* [','] > 'in' it=any ':' suite=any>"""

    def fix_explicit_context(self, node, results):
        u"""Build the replacement for a plain ``pre, *name, post = source``.

        Returns ``(setup_line, power_line)``: the first materialises the
        right-hand side into a list, the second performs the sliced
        assignment produced by assignment_source().
        """
        pre, name, post, source = (results.get(n) for n in (u"pre", u"name", u"post", u"source"))
        # Keep only NAME leaves: the interleaved commas captured by the
        # pattern are discarded and regenerated by commatize() below.
        pre = [n.clone() for n in pre if n.type == token.NAME]
        name.prefix = u" "
        post = [n.clone() for n in post if n.type == token.NAME]
        target = [n.clone() for n in commatize(pre + [name.clone()] + post)]
        # to make the special-case fix for "*z, = ..." correct with the least
        # amount of modification, make the left-side into a guaranteed tuple
        target.append(Comma())
        source.prefix = u""
        setup_line = Assign(Name(self.LISTNAME), Call(Name(u"list"), [source.clone()]))
        power_line = Assign(target, assignment_source(len(pre), len(post), self.LISTNAME, self.ITERNAME))
        return setup_line, power_line

    def fix_implicit_context(self, node, results):
        u"""
        Only example of the implicit context is
        a for loop, so only fix that.
        """
        pre, name, post, it = (results.get(n) for n in (u"pre", u"name", u"post", u"it"))
        pre = [n.clone() for n in pre if n.type == token.NAME]
        name.prefix = u" "
        post = [n.clone() for n in post if n.type == token.NAME]
        target = [n.clone() for n in commatize(pre + [name.clone()] + post)]
        # to make the special-case fix for "*z, = ..." correct with the least
        # amount of modification, make the left-side into a guaranteed tuple
        target.append(Comma())
        source = it.clone()
        source.prefix = u""
        # The loop variable is replaced by ITERNAME (done in transform());
        # here we build the list from it and the same sliced assignment as
        # in the explicit case.
        setup_line = Assign(Name(self.LISTNAME), Call(Name(u"list"), [Name(self.ITERNAME)]))
        power_line = Assign(target, assignment_source(len(pre), len(post), self.LISTNAME, self.ITERNAME))
        return setup_line, power_line

    def transform(self, node, results):
        u"""
        a,b,c,d,e,f,*g,h,i = range(100) changes to
        _3to2list = list(range(100))
        a,b,c,d,e,f,g,h,i, = _3to2list[:6] + [_3to2list[6:-2]] + _3to2list[-2:]
        and
        for a,b,*c,d,e in iter_of_iters: do_stuff changes to
        for _3to2iter in iter_of_iters:
            _3to2list = list(_3to2iter)
            a,b,c,d,e, = _3to2list[:2] + [_3to2list[2:-2]] + _3to2list[-2:]
            do_stuff
        """
        # new_name() picks helper names that do not clash with bindings
        # already present in the module being refactored.
        self.LISTNAME = self.new_name(u"_3to2list")
        self.ITERNAME = self.new_name(u"_3to2iter")
        expl, impl = results.get(u"expl"), results.get(u"impl")
        if expl is not None:
            setup_line, power_line = self.fix_explicit_context(node, results)
            setup_line.prefix = expl.prefix
            power_line.prefix = indentation(expl.parent)
            setup_line.append_child(Newline())
            parent = node.parent
            # NOTE(review): relies on Base.remove() returning the removed
            # child's old index, which is reused as the insertion point.
            i = node.remove()
            parent.insert_child(i, power_line)
            parent.insert_child(i, setup_line)
        elif impl is not None:
            setup_line, power_line = self.fix_implicit_context(node, results)
            suitify(node)
            suite = [k for k in node.children if k.type == syms.suite][0]
            setup_line.prefix = u""
            # suite.children[1] is assumed to be the INDENT token whose text
            # gives the body's indentation -- TODO confirm for all suites.
            power_line.prefix = suite.children[1].value
            suite.children[2].prefix = indentation(suite.children[2])
            # Insert at index 2 in reverse order so the four nodes end up as
            # setup_line, NL, power_line, NL at the top of the loop body.
            suite.insert_child(2, Newline())
            suite.insert_child(2, power_line)
            suite.insert_child(2, Newline())
            suite.insert_child(2, setup_line)
            results.get(u"lst").replace(Name(self.ITERNAME, prefix=u" "))

204
lib/libpasteurize/main.py Normal file
View file

@ -0,0 +1,204 @@
"""
pasteurize: automatic conversion of Python 3 code to clean 2/3 code
===================================================================
``pasteurize`` attempts to convert existing Python 3 code into source-compatible
Python 2 and 3 code.
Use it like this on Python 3 code:
$ pasteurize --verbose mypython3script.py
This removes any Py3-only syntax (e.g. new metaclasses) and adds these
import lines:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future import standard_library
standard_library.install_hooks()
from builtins import *
To write changes to the files, use the -w flag.
It also adds any other wrappers needed for Py2/3 compatibility.
Note that separate stages are not available (or needed) when converting from
Python 3 with ``pasteurize`` as they are when converting from Python 2 with
``futurize``.
The --all-imports option forces adding all ``__future__`` imports,
``builtins`` imports, and standard library aliases, even if they don't
seem necessary for the current state of each module. (This can simplify
testing, and can reduce the need to think about Py2 compatibility when editing
the code further.)
"""
from __future__ import (absolute_import, print_function, unicode_literals)
import sys
import logging
import optparse
from lib2to3.main import main, warn, StdoutRefactoringTool
from lib2to3 import refactor
from future import __version__
from libpasteurize.fixes import fix_names
def main(args=None):
    """Main program.

    Parses command-line arguments (``args``, defaulting to ``sys.argv[1:]``),
    resolves the requested/excluded fixer set, and runs lib2to3's
    refactoring machinery over the given files or stdin.

    Returns a suggested exit status: 0 on success, 1 when multiprocessing
    was requested but unsupported, 2 on usage errors.
    """
    # Set up option parser
    parser = optparse.OptionParser(usage="pasteurize [options] file|dir ...")
    parser.add_option("-V", "--version", action="store_true",
                      help="Report the version number of pasteurize")
    parser.add_option("-a", "--all-imports", action="store_true",
                      help="Adds all __future__ and future imports to each module")
    parser.add_option("-f", "--fix", action="append", default=[],
                      help="Each FIX specifies a transformation; default: all")
    parser.add_option("-j", "--processes", action="store", default=1,
                      type="int", help="Run 2to3 concurrently")
    parser.add_option("-x", "--nofix", action="append", default=[],
                      help="Prevent a fixer from being run.")
    parser.add_option("-l", "--list-fixes", action="store_true",
                      help="List available transformations")
    # parser.add_option("-p", "--print-function", action="store_true",
    #                   help="Modify the grammar so that print() is a function")
    parser.add_option("-v", "--verbose", action="store_true",
                      help="More verbose logging")
    parser.add_option("--no-diffs", action="store_true",
                      help="Don't show diffs of the refactoring")
    parser.add_option("-w", "--write", action="store_true",
                      help="Write back modified files")
    parser.add_option("-n", "--nobackups", action="store_true", default=False,
                      help="Don't write backups for modified files.")

    # Parse command line arguments
    refactor_stdin = False
    flags = {}
    options, args = parser.parse_args(args)

    avail_fixes = fix_names
    # The pasteurize grammar always treats print() as a function.
    flags["print_function"] = True

    # Sanity-check flag combinations before doing any work.
    if not options.write and options.no_diffs:
        warn("not writing files and not printing diffs; that's not very useful")
    if not options.write and options.nobackups:
        parser.error("Can't use -n without -w")
    if options.version:
        print(__version__)
        return 0
    if options.list_fixes:
        print("Available transformations for the -f/--fix option:")
        for fixname in sorted(avail_fixes):
            print(fixname)
        # Listing fixes with no files given is a complete, successful run.
        if not args:
            return 0
    if not args:
        print("At least one file or directory argument required.",
              file=sys.stderr)
        print("Use --help to show usage.", file=sys.stderr)
        return 2
    if "-" in args:
        refactor_stdin = True
        if options.write:
            print("Can't write to stdin.", file=sys.stderr)
            return 2

    # Set up logging handler
    level = logging.DEBUG if options.verbose else logging.INFO
    logging.basicConfig(format='%(name)s: %(message)s', level=level)

    # Resolve -x/--nofix names (possibly unqualified) to full module names.
    unwanted_fixes = set()
    for fix in options.nofix:
        if ".fix_" in fix:
            unwanted_fixes.add(fix)
        else:
            # Infer the full module name for the fixer.
            # First ensure that no names clash (e.g.
            # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah):
            found = [f for f in avail_fixes
                     if f.endswith('fix_{0}'.format(fix))]
            if len(found) > 1:
                print("Ambiguous fixer name. Choose a fully qualified "
                      "module name instead from these:\n" +
                      "\n".join("  " + myf for myf in found),
                      file=sys.stderr)
                return 2
            elif len(found) == 0:
                print("Unknown fixer. Use --list-fixes or -l for a list.",
                      file=sys.stderr)
                return 2
            unwanted_fixes.add(found[0])

    extra_fixes = set()
    if options.all_imports:
        prefix = 'libpasteurize.fixes.'
        extra_fixes.add(prefix + 'fix_add_all__future__imports')
        extra_fixes.add(prefix + 'fix_add_future_standard_library_import')
        extra_fixes.add(prefix + 'fix_add_all_future_builtins')

    # Resolve -f/--fix names the same way; 'all' selects every fixer.
    explicit = set()
    if options.fix:
        all_present = False
        for fix in options.fix:
            if fix == 'all':
                all_present = True
            else:
                if ".fix_" in fix:
                    explicit.add(fix)
                else:
                    # Infer the full module name for the fixer.
                    # First ensure that no names clash (e.g.
                    # lib2to3.fixes.fix_blah and libpasteurize.fixes.fix_blah):
                    found = [f for f in avail_fixes
                             if f.endswith('fix_{0}'.format(fix))]
                    if len(found) > 1:
                        print("Ambiguous fixer name. Choose a fully qualified "
                              "module name instead from these:\n" +
                              "\n".join("  " + myf for myf in found),
                              file=sys.stderr)
                        return 2
                    elif len(found) == 0:
                        print("Unknown fixer. Use --list-fixes or -l for a list.",
                              file=sys.stderr)
                        return 2
                    explicit.add(found[0])
        if len(explicit & unwanted_fixes) > 0:
            print("Conflicting usage: the following fixers have been "
                  "simultaneously requested and disallowed:\n" +
                  "\n".join("  " + myf for myf in (explicit & unwanted_fixes)),
                  file=sys.stderr)
            return 2
        requested = avail_fixes.union(explicit) if all_present else explicit
    else:
        requested = avail_fixes.union(explicit)

    # Bug fix: set difference (-) binds tighter than union (|), so the
    # original ``requested | extra_fixes - unwanted_fixes`` only removed
    # unwanted fixes from extra_fixes and -x exclusions in the requested
    # set were silently ignored.  Parenthesize so exclusions apply to the
    # whole selection.
    fixer_names = (requested | extra_fixes) - unwanted_fixes

    # Initialize the refactoring tool
    rt = StdoutRefactoringTool(sorted(fixer_names), flags, set(),
                               options.nobackups, not options.no_diffs)

    # Refactor all files and directories passed as arguments
    if not rt.errors:
        if refactor_stdin:
            rt.refactor_stdin()
        else:
            try:
                rt.refactor(args, options.write, None,
                            options.processes)
            except refactor.MultiprocessingUnsupported:
                assert options.processes > 1
                print("Sorry, -j isn't "
                      "supported on this platform.", file=sys.stderr)
                return 1
        rt.summarize()

    # Return error status (0 if rt.errors is zero)
    return int(bool(rt.errors))