Index: Lib/token.py
===================================================================
--- Lib/token.py	(revision 86303)
+++ Lib/token.py	(working copy)
@@ -1,7 +1,7 @@
-#! /usr/bin/env python3
-
 """Token constants (from "token.h")."""
 
+__all__ = ['tok_name', 'ISTERMINAL', 'ISNONTERMINAL', 'ISEOF']
+
 # This file is automatically generated; please don't muck it up!
 #
 # To update the symbols in this file, 'cd' to the top directory of
@@ -68,13 +68,11 @@
 NT_OFFSET = 256
 #--end constants--
 
-tok_name = {}
-for _name, _value in list(globals().items()):
-    if type(_value) is type(0):
-        tok_name[_value] = _name
-del _name, _value
+tok_name = {value: name
+            for name, value in globals().items()
+            if isinstance(value, int)}
+__all__.extend(tok_name.values())
 
-
 def ISTERMINAL(x):
     return x < NT_OFFSET
 
@@ -85,7 +83,7 @@
     return x == ENDMARKER
 
 
-def main():
+def _main():
     import re
     import sys
     args = sys.argv[1:]
@@ -139,4 +137,4 @@
 
 
 if __name__ == "__main__":
-    main()
+    _main()
Index: Lib/tokenize.py
===================================================================
--- Lib/tokenize.py	(revision 86303)
+++ Lib/tokenize.py	(working copy)
@@ -32,9 +32,8 @@
 cookie_re = re.compile("coding[:=]\s*([-\w.]+)")
 
 import token
-__all__ = [x for x in dir(token) if not x.startswith("_")]
-__all__.extend(["COMMENT", "tokenize", "detect_encoding", "NL", "untokenize",
-                "ENCODING", "TokenInfo"])
+__all__ = token.__all__ + ["COMMENT", "tokenize", "detect_encoding",
+                           "NL", "untokenize", "ENCODING", "TokenInfo"]
 del token
 
 COMMENT = N_TOKENS
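
For reference, here is a minimal sanity check of the patched modules. It is
a sketch, not part of the patch: it assumes the diff above has been applied
to a checkout, and it only touches names exported by the new __all__ lists.

# Hypothetical smoke-test script, not included in the patch.
import token
import tokenize

# tok_name maps each numeric token value back to its symbolic name.
assert token.tok_name[token.ENDMARKER] == 'ENDMARKER'

# The helper functions are exported explicitly via token.__all__.
assert token.ISTERMINAL(token.NAME)
assert token.ISNONTERMINAL(token.NT_OFFSET)
assert token.ISEOF(token.ENDMARKER)

# tokenize.__all__ is now built from token.__all__ plus its own additions,
# so the public token constants are importable from tokenize as well.
assert 'ENDMARKER' in tokenize.__all__
for name in ('COMMENT', 'tokenize', 'detect_encoding', 'untokenize'):
    assert name in tokenize.__all__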