Index: lib2to3/pytree.py
===================================================================
--- lib2to3/pytree.py	(revision 80572)
+++ lib2to3/pytree.py	(working copy)
@@ -289,8 +289,7 @@
             for node in child.post_order():
                 yield node
 
-    @property
-    def prefix(self):
+    def _prefix_getter(self):
         """
         The whitespace and comments preceding this node in the input.
         """
@@ -298,11 +297,12 @@
             return ""
         return self.children[0].prefix
 
-    @prefix.setter
-    def prefix(self, prefix):
+    def _prefix_setter(self, prefix):
         if self.children:
             self.children[0].prefix = prefix
 
+    prefix = property(_prefix_getter, _prefix_setter)
+
     def set_child(self, i, child):
         """
         Equivalent to 'node.children[i] = child'. This method also sets the
@@ -390,18 +390,17 @@
         """Return a pre-order iterator for the tree."""
         yield self
 
-    @property
-    def prefix(self):
+    def _prefix_getter(self):
         """
         The whitespace and comments preceding this token in the input.
         """
         return self._prefix
 
-    @prefix.setter
-    def prefix(self, prefix):
+    def _prefix_setter(self, prefix):
         self.changed()
         self._prefix = prefix
 
+    prefix = property(_prefix_getter, _prefix_setter)
 
 def convert(gr, raw_node):
     """
Index: lib2to3/tests/test_parser.py
===================================================================
--- lib2to3/tests/test_parser.py	(revision 80572)
+++ lib2to3/tests/test_parser.py	(working copy)
@@ -6,13 +6,14 @@
 test_grammar.py files from both Python 2 and Python 3.
 """
 
+from __future__ import with_statement
+
 # Testing imports
 from . import support
 from .support import driver, test_dir
 
 # Python imports
 import os
-import io
 import sys
 
 # Local imports
@@ -156,8 +157,9 @@
             encoding = tokenize.detect_encoding(fp.readline)[0]
             self.assertTrue(encoding is not None,
                             "can't detect encoding for %s" % filepath)
-            with io.open(filepath, "r", encoding=encoding) as fp:
+            with open(filepath, "r") as fp:
                 source = fp.read()
+            source = source.decode(encoding)
             tree = driver.parse_string(source)
             new = unicode(tree)
             if diff(filepath, new, encoding):
@@ -203,9 +205,9 @@
 
 
 def diff(fn, result, encoding):
-    f = io.open("@", "w", encoding=encoding)
+    f = open("@", "w")
     try:
-        f.write(result)
+        f.write(result.encode(encoding))
     finally:
         f.close()
     try:
Index: lib2to3/tests/test_refactor.py
===================================================================
--- lib2to3/tests/test_refactor.py	(revision 80572)
+++ lib2to3/tests/test_refactor.py	(working copy)
@@ -2,6 +2,8 @@
 Unit tests for refactor.py.
 """
 
+from __future__ import with_statement
+
 import sys
 import os
 import codecs
Index: lib2to3/tests/test_pytree.py
===================================================================
--- lib2to3/tests/test_pytree.py	(revision 80572)
+++ lib2to3/tests/test_pytree.py	(working copy)
@@ -9,6 +9,9 @@
 especially when debugging a test.
 """
 
+from __future__ import with_statement
+
+import sys
 import warnings
 
 # Testing imports
@@ -28,20 +31,22 @@
 
     """Unit tests for nodes (Base, Leaf, Node)."""
 
-    def test_deprecated_prefix_methods(self):
-        l = pytree.Leaf(100, "foo")
-        with warnings.catch_warnings(record=True) as w:
-            warnings.simplefilter("always", DeprecationWarning)
-            self.assertEqual(l.get_prefix(), "")
-            l.set_prefix("hi")
-        self.assertEqual(l.prefix, "hi")
-        self.assertEqual(len(w), 2)
-        for warning in w:
-            self.assertTrue(warning.category is DeprecationWarning)
-        self.assertEqual(str(w[0].message), "get_prefix() is deprecated; " \
-                             "use the prefix property")
-        self.assertEqual(str(w[1].message), "set_prefix() is deprecated; " \
-                             "use the prefix property")
+    if sys.version_info >= (2,6):
+        # warnings.catch_warnings is new in 2.6.
+        def test_deprecated_prefix_methods(self):
+            l = pytree.Leaf(100, "foo")
+            with warnings.catch_warnings(record=True) as w:
+                warnings.simplefilter("always", DeprecationWarning)
+                self.assertEqual(l.get_prefix(), "")
+                l.set_prefix("hi")
+            self.assertEqual(l.prefix, "hi")
+            self.assertEqual(len(w), 2)
+            for warning in w:
+                self.assertTrue(warning.category is DeprecationWarning)
+            self.assertEqual(str(w[0].message), "get_prefix() is deprecated; " \
+                                 "use the prefix property")
+            self.assertEqual(str(w[1].message), "set_prefix() is deprecated; " \
+                                 "use the prefix property")
 
     def test_instantiate_base(self):
         if __debug__:
Index: lib2to3/patcomp.py
===================================================================
--- lib2to3/patcomp.py	(revision 80572)
+++ lib2to3/patcomp.py	(working copy)
@@ -57,7 +57,7 @@
         tokens = tokenize_wrapper(input)
         try:
             root = self.driver.parse_tokens(tokens, debug=debug)
-        except parse.ParseError as e:
+        except parse.ParseError, e:
             raise PatternSyntaxError(str(e))
         return self.compile_node(root)
 
Index: lib2to3/pgen2/tokenize.py
===================================================================
--- lib2to3/pgen2/tokenize.py	(revision 80572)
+++ lib2to3/pgen2/tokenize.py	(working copy)
@@ -38,6 +38,13 @@
            "generate_tokens", "untokenize"]
 del token
 
+try:
+    bytes
+except NameError:
+    # Support bytes type in Python <= 2.5, so 2to3 turns itself into
+    # valid Python 3 code.
+    bytes = str
+
 def group(*choices): return '(' + '|'.join(choices) + ')'
 def any(*choices): return group(*choices) + '*'
 def maybe(*choices): return group(*choices) + '?'
@@ -265,7 +272,7 @@
         try:
             return readline()
         except StopIteration:
-            return b''
+            return bytes()
 
     def find_cookie(line):
         try:
Index: lib2to3/main.py
===================================================================
--- lib2to3/main.py	(revision 80572)
+++ lib2to3/main.py	(working copy)
@@ -2,6 +2,8 @@
 Main program for 2to3.
 """
 
+from __future__ import with_statement
+
 import sys
 import os
 import difflib
Index: lib2to3/refactor.py
===================================================================
--- lib2to3/refactor.py	(revision 80572)
+++ lib2to3/refactor.py	(working copy)
@@ -8,6 +8,8 @@
 provides infrastructure to write your own refactoring tool.
 """
 
+from __future__ import with_statement
+
 __author__ = "Guido van Rossum <guido@python.org>"
 
 
@@ -126,7 +128,7 @@
     have_docstring = False
     gen = tokenize.generate_tokens(StringIO.StringIO(source).readline)
     def advance():
-        tok = next(gen)
+        tok = gen.next()
         return tok[0], tok[1]
     ignore = frozenset((token.NEWLINE, tokenize.NL, token.COMMENT))
     features = set()
Index: lib2to3/fixes/fix_operator.py
===================================================================
--- lib2to3/fixes/fix_operator.py	(revision 80572)
+++ lib2to3/fixes/fix_operator.py	(working copy)
@@ -14,10 +14,10 @@
     func = "'(' func=any ')'"
    PATTERN = """
              power< module='operator'
-                trailer< '.' {methods} > trailer< {func} > >
+                trailer< '.' %(methods)s > trailer< %(func)s > >
              |
-              power< {methods} trailer< {func} > >
-              """.format(methods=methods, func=func)
+              power< %(methods)s trailer< %(func)s > >
+              """ % dict(methods=methods, func=func)
 
     def transform(self, node, results):
         method = results["method"][0]