Rietveld Code Review Tool

Delta Between Two Patch Sets: Lib/compileall.py

Issue 16104: Use multiprocessing in compileall script
Left Patch Set: Created 5 years, 6 months ago
Right Patch Set: Created 5 years ago
LEFT | RIGHT
1 """Module/script to byte-compile all .py files to .pyc (or .pyo) files. 1 """Module/script to byte-compile all .py files to .pyc (or .pyo) files.
2 2
3 When called as a script with arguments, this compiles the directories 3 When called as a script with arguments, this compiles the directories
4 given as arguments recursively; the -l option prevents it from 4 given as arguments recursively; the -l option prevents it from
5 recursing into directories. 5 recursing into directories.
6 6
7 Without arguments, if compiles all modules on sys.path, without 7 Without arguments, if compiles all modules on sys.path, without
8 recursing into subdirectories. (Even though it should do so for 8 recursing into subdirectories. (Even though it should do so for
9 packages -- for now, you'll have to deal with packages separately.) 9 packages -- for now, you'll have to deal with packages separately.)
10 10
11 See module py_compile for details of the actual byte-compilation. 11 See module py_compile for details of the actual byte-compilation.
12 """ 12 """
13 import os 13 import os
14 import sys 14 import sys
15 import errno
16 import importlib.util 15 import importlib.util
17 import py_compile 16 import py_compile
18 import struct 17 import struct
19 18
20 try: 19 try:
21 from concurrent.futures import ProcessPoolExecutor 20 from concurrent.futures import ProcessPoolExecutor
22 _have_multiprocessing = True
23 except ImportError: 21 except ImportError:
24 _have_multiprocessing = False 22 ProcessPoolExecutor = None
25 from functools import partial 23 from functools import partial
26 24
27 __all__ = ["compile_dir","compile_file","compile_path"] 25 __all__ = ["compile_dir","compile_file","compile_path"]
28 26
29 def _walk_dir(dir, ddir=None, maxlevels=10, quiet=False): 27 def _walk_dir(dir, ddir=None, maxlevels=10, quiet=False):
30 if not quiet: 28 if not quiet:
31 print('Listing {!r}...'.format(dir)) 29 print('Listing {!r}...'.format(dir))
32 try: 30 try:
33 names = os.listdir(dir) 31 names = os.listdir(dir)
34 except OSError: 32 except OSError:
35 print("Can't list {!r}".format(dir)) 33 print("Can't list {!r}".format(dir))
36 names = [] 34 names = []
37 names.sort() 35 names.sort()
38 for name in names: 36 for name in names:
39 if name == '__pycache__': 37 if name == '__pycache__':
40 continue 38 continue
41 fullname = os.path.join(dir, name) 39 fullname = os.path.join(dir, name)
42 if ddir is not None: 40 if ddir is not None:
43 dfile = os.path.join(ddir, name) 41 dfile = os.path.join(ddir, name)
44 else: 42 else:
45 dfile = None 43 dfile = None
46 if not os.path.isdir(fullname): 44 if not os.path.isdir(fullname):
47 yield fullname 45 yield fullname
48 elif (maxlevels > 0 and name != os.curdir and name != os.pardir and 46 elif (maxlevels > 0 and name != os.curdir and name != os.pardir and
49 os.path.isdir(fullname) and not os.path.islink(fullname)): 47 os.path.isdir(fullname) and not os.path.islink(fullname)):
50 yield from _walk_dir(fullname, ddir=dfile, 48 yield from _walk_dir(fullname, ddir=dfile,
51 maxlevels=maxlevels - 1, quiet=quiet) 49 maxlevels=maxlevels - 1, quiet=quiet)
52 50
53 def compile_dir(dir, maxlevels=10, ddir=None, force=False, rx=None, 51 def compile_dir(dir, maxlevels=10, ddir=None, force=False, rx=None,
54 quiet=False, legacy=False, optimize=-1, processes=None): 52 quiet=False, legacy=False, optimize=-1, workers=1):
55 """Byte-compile all modules in the given directory tree. 53 """Byte-compile all modules in the given directory tree.
56 54
57 Arguments (only dir is required): 55 Arguments (only dir is required):
58 56
59 dir: the directory to byte-compile 57 dir: the directory to byte-compile
60 maxlevels: maximum recursion level (default 10) 58 maxlevels: maximum recursion level (default 10)
61 ddir: the directory that will be prepended to the path to the 59 ddir: the directory that will be prepended to the path to the
62 file as it is compiled into each byte-code file. 60 file as it is compiled into each byte-code file.
63 force: if True, force compilation, even if timestamps are up-to-date 61 force: if True, force compilation, even if timestamps are up-to-date
64 quiet: if True, be quiet during compilation 62 quiet: if True, be quiet during compilation
65 legacy: if True, produce legacy pyc paths instead of PEP 3147 paths 63 legacy: if True, produce legacy pyc paths instead of PEP 3147 paths
66 optimize: optimization level or -1 for level of the interpreter 64 optimize: optimization level or -1 for level of the interpreter
67 processes: if given, it will be the number of workers which will 65 workers: maximum number of parallel workers
68 process the given directory.
69 """ 66 """
70 files = _walk_dir(dir, quiet=quiet, maxlevels=maxlevels, 67 files = _walk_dir(dir, quiet=quiet, maxlevels=maxlevels,
71 ddir=ddir) 68 ddir=ddir)
72 success = 1 69 success = 1
73 if processes is not None: 70 if workers is not None and workers != 1:
74 if not _have_multiprocessing: 71 if workers < 0:
75 raise ValueError('multiprocessing support not available') 72 raise ValueError('workers must be greater or equal to 0')
76 with ProcessPoolExecutor( 73 if ProcessPoolExecutor is None:
77 max_workers=processes) as executor: 74 raise NotImplementedError('multiprocessing support not available')
75
76 workers = workers or os.cpu_count()
brett.cannon 2014/09/12 16:21:59 This can be `workers or None` as that will default to the number of processors.
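For context, `ProcessPoolExecutor(max_workers=None)` already defaults to the number of processors on the machine, which is what the suggestion relies on. A minimal sketch of the suggested shape (the `_compile_parallel` helper is hypothetical and not part of the patch):

    # Hypothetical sketch of the reviewer's suggestion, not the patch itself.
    from concurrent.futures import ProcessPoolExecutor
    from functools import partial
    from compileall import compile_file

    def _compile_parallel(files, workers, **kwargs):
        # workers == 0 means "use every CPU"; max_workers=None already
        # resolves to the processor count, so os.cpu_count() is not needed.
        with ProcessPoolExecutor(max_workers=workers or None) as executor:
            results = executor.map(partial(compile_file, **kwargs), files)
            return min(results, default=1)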
77 with ProcessPoolExecutor(max_workers=workers) as executor:
78 results = executor.map(partial(compile_file, 78 results = executor.map(partial(compile_file,
79 ddir=ddir, force=force, 79 ddir=ddir, force=force,
80 rx=rx, quiet=quiet, 80 rx=rx, quiet=quiet,
81 legacy=legacy, 81 legacy=legacy,
82 optimize=optimize), 82 optimize=optimize),
83 files) 83 files)
84 for result in results: 84 success = min(results, default=1)
85 success = 0 if not result else 1
86 else: 85 else:
87 for file in files: 86 for file in files:
88 if not compile_file(file, ddir, force, rx, quiet, 87 if not compile_file(file, ddir, force, rx, quiet,
89 legacy, optimize): 88 legacy, optimize):
90 success = 0 89 success = 0
91 return success 90 return success
92 91
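A note on the success aggregation in the right patch set: `executor.map` yields each `compile_file` return value (1 for success, 0 for failure), so taking the minimum reports failure if any file failed, and `default=1` keeps an empty file list counting as success. Note that the left-hand loop only recorded the last result. A toy illustration (values are made up):

    # Toy values standing in for compile_file return codes.
    results = iter([1, 1, 0, 1])
    assert min(results, default=1) == 0   # at least one file failed

    assert min(iter([]), default=1) == 1  # no files to compile still succeeds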
93 def compile_file(fullname, ddir=None, force=False, rx=None, quiet=False, 92 def compile_file(fullname, ddir=None, force=False, rx=None, quiet=False,
94 legacy=False, optimize=-1): 93 legacy=False, optimize=-1):
95 """Byte-compile one file. 94 """Byte-compile one file.
(...skipping 95 matching lines...)
191 190
192 def main(): 191 def main():
193 """Script main program.""" 192 """Script main program."""
194 import argparse 193 import argparse
195 194
196 parser = argparse.ArgumentParser( 195 parser = argparse.ArgumentParser(
197 description='Utilities to support installing Python libraries.') 196 description='Utilities to support installing Python libraries.')
198 parser.add_argument('-l', action='store_const', const=0, 197 parser.add_argument('-l', action='store_const', const=0,
199 default=10, dest='maxlevels', 198 default=10, dest='maxlevels',
200 help="don't recurse into subdirectories") 199 help="don't recurse into subdirectories")
200 parser.add_argument('-r', type=int, dest='recursion',
201 help=('control the maximum recursion level. '
202 'if `-l` and `-r` options are specified, '
203 'then `-r` takes precedence.'))
201 parser.add_argument('-f', action='store_true', dest='force', 204 parser.add_argument('-f', action='store_true', dest='force',
202 help='force rebuild even if timestamps are up to date') 205 help='force rebuild even if timestamps are up to date')
203 parser.add_argument('-q', action='store_true', dest='quiet', 206 parser.add_argument('-q', action='store_true', dest='quiet',
204 help='output only error messages') 207 help='output only error messages')
205 parser.add_argument('-b', action='store_true', dest='legacy', 208 parser.add_argument('-b', action='store_true', dest='legacy',
206 help='use legacy (pre-PEP3147) compiled file locations') 209 help='use legacy (pre-PEP3147) compiled file locations')
207 parser.add_argument('-d', metavar='DESTDIR', dest='ddir', default=None, 210 parser.add_argument('-d', metavar='DESTDIR', dest='ddir', default=None,
208 help=('directory to prepend to file paths for use in ' 211 help=('directory to prepend to file paths for use in '
209 'compile-time tracebacks and in runtime ' 212 'compile-time tracebacks and in runtime '
210 'tracebacks in cases where the source file is ' 213 'tracebacks in cases where the source file is '
211 'unavailable')) 214 'unavailable'))
212 parser.add_argument('-x', metavar='REGEXP', dest='rx', default=None, 215 parser.add_argument('-x', metavar='REGEXP', dest='rx', default=None,
213 help=('skip files matching the regular expression; ' 216 help=('skip files matching the regular expression; '
214 'the regexp is searched for in the full path ' 217 'the regexp is searched for in the full path '
215 'of each file considered for compilation')) 218 'of each file considered for compilation'))
216 parser.add_argument('-i', metavar='FILE', dest='flist', 219 parser.add_argument('-i', metavar='FILE', dest='flist',
217 help=('add all the files and directories listed in ' 220 help=('add all the files and directories listed in '
218 'FILE to the list considered for compilation; ' 221 'FILE to the list considered for compilation; '
219 'if "-", names are read from stdin')) 222 'if "-", names are read from stdin'))
220 parser.add_argument('compile_dest', metavar='FILE|DIR', nargs='*', 223 parser.add_argument('compile_dest', metavar='FILE|DIR', nargs='*',
221 help=('zero or more file and directory names ' 224 help=('zero or more file and directory names '
222 'to compile; if no arguments given, defaults ' 225 'to compile; if no arguments given, defaults '
223 'to the equivalent of -l sys.path')) 226 'to the equivalent of -l sys.path'))
224 parser.add_argument('-j', '--processes', action='store', default=None, 227 parser.add_argument('-j', '--workers', default=1,
225 type=int, help='Run compileall concurrently') 228 type=int, help='Run compileall concurrently')
226 229
227 args = parser.parse_args() 230 args = parser.parse_args()
228 compile_dests = args.compile_dest 231 compile_dests = args.compile_dest
229 232
230 if (args.ddir and (len(compile_dests) != 1 233 if (args.ddir and (len(compile_dests) != 1
231 or not os.path.isdir(compile_dests[0]))): 234 or not os.path.isdir(compile_dests[0]))):
232 parser.exit('-d destdir requires exactly one directory argument') 235 parser.exit('-d destdir requires exactly one directory argument')
233 if args.rx: 236 if args.rx:
234 import re 237 import re
235 args.rx = re.compile(args.rx) 238 args.rx = re.compile(args.rx)
239
240
241 if args.recursion is not None:
242 maxlevels = args.recursion
243 else:
244 maxlevels = args.maxlevels
236 245
237 # if flist is provided then load it 246 # if flist is provided then load it
238 if args.flist: 247 if args.flist:
239 try: 248 try:
240 with (sys.stdin if args.flist=='-' else open(args.flist)) as f: 249 with (sys.stdin if args.flist=='-' else open(args.flist)) as f:
241 for line in f: 250 for line in f:
242 compile_dests.append(line.strip()) 251 compile_dests.append(line.strip())
243 except OSError: 252 except OSError:
244 print("Error reading file list {}".format(args.flist)) 253 print("Error reading file list {}".format(args.flist))
245 return False 254 return False
246 255
247 if args.processes is not None: 256 if args.workers is not None:
248 if args.processes <= 0: 257 args.workers = args.workers or os.cpu_count()
brett.cannon 2014/09/12 16:21:59 This can be `args.workers or None`.
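A self-contained sketch of this suggestion (the parallel branch in compile_dir above would also need to accept None for it to take effect):

    import argparse

    # Mirrors the -j/--workers option defined earlier in main().
    parser = argparse.ArgumentParser()
    parser.add_argument('-j', '--workers', default=1, type=int,
                        help='Run compileall concurrently')
    args = parser.parse_args(['-j', '0'])

    # Reviewer's suggestion: map 0 to None and let ProcessPoolExecutor
    # resolve None to the processor count, instead of calling os.cpu_count().
    workers = args.workers or None
    assert workers is None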
249 args.processes = os.cpu_count()
250 258
251 success = True 259 success = True
252 try: 260 try:
253 if compile_dests: 261 if compile_dests:
254 for dest in compile_dests: 262 for dest in compile_dests:
255 if os.path.isfile(dest): 263 if os.path.isfile(dest):
256 if not compile_file(dest, args.ddir, args.force, args.rx, 264 if not compile_file(dest, args.ddir, args.force, args.rx,
257 args.quiet, args.legacy): 265 args.quiet, args.legacy):
258 success = False 266 success = False
259 else: 267 else:
260 if not compile_dir(dest, args.maxlevels, args.ddir, 268 if not compile_dir(dest, maxlevels, args.ddir,
261 args.force, args.rx, args.quiet, 269 args.force, args.rx, args.quiet,
262 args.legacy, processes=args.processes): 270 args.legacy, workers=args.workers):
263 success = False 271 success = False
264 return success 272 return success
265 else: 273 else:
266 return compile_path(legacy=args.legacy, force=args.force, 274 return compile_path(legacy=args.legacy, force=args.force,
267 quiet=args.quiet) 275 quiet=args.quiet)
268 except KeyboardInterrupt: 276 except KeyboardInterrupt:
269 print("\n[interrupted]") 277 print("\n[interrupted]")
270 return False 278 return False
271 return True 279 return True
272 280
273 281
274 if __name__ == '__main__': 282 if __name__ == '__main__':
275 exit_status = int(not main()) 283 exit_status = int(not main())
276 sys.exit(exit_status) 284 sys.exit(exit_status)
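For reference, the interface added by the right patch set can be exercised like this (the directory path is only an example):

    # Command line: byte-compile a tree using 4 worker processes,
    # or -j 0 to use every available CPU.
    #   python -m compileall -j 4 Lib/

    # Programmatic equivalent via the new keyword argument:
    import compileall

    ok = compileall.compile_dir('Lib', workers=4)
    print('all files compiled' if ok else 'some files failed to compile')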
