Rietveld Code Review Tool
Help | Bug tracker | Discussion group | Source code | Sign in
(154040)

Side by Side Diff: Lib/functools.py

Issue 14373: C implementation of functools.lru_cache
Patch Set: Created 6 years, 7 months ago
Left:
Right:
Use n/p to move between diff chunks; N/P to move between comments. Please Sign in to add in-line comments.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | Lib/test/test_functools.py » ('j') | Lib/test/test_functools.py » ('J')
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 """functools.py - Tools for working with functions and callable objects 1 """functools.py - Tools for working with functions and callable objects
2 """ 2 """
3 # Python module wrapper for _functools C module 3 # Python module wrapper for _functools C module
4 # to allow utilities written in Python to be added 4 # to allow utilities written in Python to be added
5 # to the functools module. 5 # to the functools module.
6 # Written by Nick Coghlan <ncoghlan at gmail.com> 6 # Written by Nick Coghlan <ncoghlan at gmail.com>
7 # and Raymond Hettinger <python at rcn.com> 7 # and Raymond Hettinger <python at rcn.com>
8 # Copyright (C) 2006-2010 Python Software Foundation. 8 # Copyright (C) 2006-2010 Python Software Foundation.
9 # See C source code for _functools credits/copyright 9 # See C source code for _functools credits/copyright
10 10
11 __all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES', 11 __all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES',
12 'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', 'partial'] 12 'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', 'partial']
13 13
14 try: 14 try:
15 from _functools import reduce 15 from _functools import reduce
16 except ImportError: 16 except ImportError:
17 pass 17 pass
18
18 from collections import namedtuple 19 from collections import namedtuple
19 try: 20 try:
20 from _thread import allocate_lock as Lock 21 from _thread import allocate_lock as Lock
21 except: 22 except:
22 from _dummy_thread import allocate_lock as Lock 23 from _dummy_thread import allocate_lock as Lock
23 24
24 25
25 ################################################################################ 26 ################################################################################
26 ### update_wrapper() and wraps() decorator 27 ### update_wrapper() and wraps() decorator
27 ################################################################################ 28 ################################################################################
(...skipping 161 matching lines...) Expand 10 before | Expand all | Expand 10 after
189 key += kwd_mark 190 key += kwd_mark
190 for item in sorted_items: 191 for item in sorted_items:
191 key += item 192 key += item
192 if typed: 193 if typed:
193 key += tuple(type(v) for v in args) 194 key += tuple(type(v) for v in args)
194 if kwds: 195 if kwds:
195 key += tuple(type(v) for k, v in sorted_items) 196 key += tuple(type(v) for k, v in sorted_items)
196 elif len(key) == 1 and type(key[0]) in fasttypes: 197 elif len(key) == 1 and type(key[0]) in fasttypes:
197 return key[0] 198 return key[0]
198 return _HashedSeq(key) 199 return _HashedSeq(key)
200
199 201
200 def lru_cache(maxsize=128, typed=False): 202 def lru_cache(maxsize=128, typed=False):
201 """Least-recently-used cache decorator. 203 """Least-recently-used cache decorator.
202 204
203 If *maxsize* is set to None, the LRU features are disabled and the cache 205 If *maxsize* is set to None, the LRU features are disabled and the cache
204 can grow without bound. 206 can grow without bound.
205 207
206 If *typed* is True, arguments of different types will be cached separately. 208 If *typed* is True, arguments of different types will be cached separately.
207 For example, f(3.0) and f(3) will be treated as distinct calls with 209 For example, f(3.0) and f(3) will be treated as distinct calls with
208 distinct results. 210 distinct results.
(...skipping 11 matching lines...) Expand all
220 # Users should only access the lru_cache through its public API: 222 # Users should only access the lru_cache through its public API:
221 # cache_info, cache_clear, and f.__wrapped__ 223 # cache_info, cache_clear, and f.__wrapped__
222 # The internals of the lru_cache are encapsulated for thread safety and 224 # The internals of the lru_cache are encapsulated for thread safety and
223 # to allow the implementation to change (including a possible C version). 225 # to allow the implementation to change (including a possible C version).
224 226
225 # Constants shared by all lru cache instances: 227 # Constants shared by all lru cache instances:
226 sentinel = object() # unique object used to signal cache misses 228 sentinel = object() # unique object used to signal cache misses
227 make_key = _make_key # build a key from the function arguments 229 make_key = _make_key # build a key from the function arguments
228 PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields 230 PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields
229 231
230 def decorating_function(user_function): 232 try:
233 from _functools import c_lru_cache
storchaka 2012/12/20 11:02:21 It would be better to move import outside from lru
234 except ImportError:
235 def decorating_function(user_function):
236 cache = {}
237 hits = misses = currsize = 0
238 full = False
239 cache_get = cache.get # bound method to lookup a key or return None
240 lock = Lock() # because linkedlist updates aren't threadsafe
241 root = [] # root of the circular doubly linked list
242 root[:] = [root, root, None, None] # initialize by pointing to self
231 243
232 cache = {} 244 if maxsize == 0:
233 hits = misses = currsize = 0
234 full = False
235 cache_get = cache.get # bound method to lookup a key or return None
236 lock = Lock() # because linkedlist updates aren't threadsafe
237 root = [] # root of the circular doubly linked list
238 root[:] = [root, root, None, None] # initialize by pointing to self
239 245
240 if maxsize == 0: 246 def wrapper(*args, **kwds):
247 # no caching, just a statistics update after a successful call
248 nonlocal misses
249 result = user_function(*args, **kwds)
250 misses += 1
251 return result
241 252
242 def wrapper(*args, **kwds): 253 elif maxsize is None:
243 # no caching, just a statistics update after a successful call
244 nonlocal misses
245 result = user_function(*args, **kwds)
246 misses += 1
247 return result
248 254
249 elif maxsize is None: 255 def wrapper(*args, **kwds):
250 256 # simple caching without ordering or size limit
251 def wrapper(*args, **kwds): 257 nonlocal hits, misses, currsize
252 # simple caching without ordering or size limit 258 key = make_key(args, kwds, typed)
253 nonlocal hits, misses, currsize 259 result = cache_get(key, sentinel)
254 key = make_key(args, kwds, typed) 260 if result is not sentinel:
255 result = cache_get(key, sentinel)
256 if result is not sentinel:
257 hits += 1
258 return result
259 result = user_function(*args, **kwds)
260 cache[key] = result
261 misses += 1
262 currsize += 1
263 return result
264
265 else:
266
267 def wrapper(*args, **kwds):
268 # size limited caching that tracks accesses by recency
269 nonlocal root, hits, misses, currsize, full
270 key = make_key(args, kwds, typed)
271 with lock:
272 link = cache_get(key)
273 if link is not None:
274 # move the link to the front of the circular queue
275 link_prev, link_next, key, result = link
276 link_prev[NEXT] = link_next
277 link_next[PREV] = link_prev
278 last = root[PREV]
279 last[NEXT] = root[PREV] = link
280 link[PREV] = last
281 link[NEXT] = root
282 hits += 1 261 hits += 1
283 return result 262 return result
284 result = user_function(*args, **kwds) 263 result = user_function(*args, **kwds)
264 cache[key] = result
265 misses += 1
266 currsize += 1
267 return result
268
269 else:
270
271 def wrapper(*args, **kwds):
272 # size limited caching that tracks accesses by recency
273 nonlocal root, hits, misses, currsize, full
274 key = make_key(args, kwds, typed)
275 with lock:
276 link = cache_get(key)
277 if link is not None:
278 # move the link to the front of the circular queue
279 link_prev, link_next, key, result = link
280 link_prev[NEXT] = link_next
281 link_next[PREV] = link_prev
282 last = root[PREV]
283 last[NEXT] = root[PREV] = link
284 link[PREV] = last
285 link[NEXT] = root
286 hits += 1
287 return result
288 result = user_function(*args, **kwds)
289 with lock:
290 if key in cache:
291 # getting here means that this same key was added to the
292 # cache while the lock was released. since the link
293 # update is already done, we need only return the
294 # computed result and update the count of misses.
295 pass
296 elif full:
297 # use root to store the new key and result
298 root[KEY] = key
299 root[RESULT] = result
300 cache[key] = root
301 # empty the oldest link and make it the new root
302 root = root[NEXT]
303 del cache[root[KEY]]
304 root[KEY] = root[RESULT] = None
305 else:
306 # put result in a new link at the front of the queue
307 last = root[PREV]
308 link = [last, root, key, result]
309 cache[key] = last[NEXT] = root[PREV] = link
310 currsize += 1
311 full = (currsize == maxsize)
312 misses += 1
313 return result
314
315 def cache_info():
316 """Report cache statistics"""
285 with lock: 317 with lock:
286 if key in cache: 318 return _CacheInfo(hits, misses, maxsize, currsize)
287 # getting here means that this same key was added to the
288 # cache while the lock was released. since the link
289 # update is already done, we need only return the
290 # computed result and update the count of misses.
291 pass
292 elif full:
293 # use root to store the new key and result
294 root[KEY] = key
295 root[RESULT] = result
296 cache[key] = root
297 # empty the oldest link and make it the new root
298 root = root[NEXT]
299 del cache[root[KEY]]
300 root[KEY] = root[RESULT] = None
301 else:
302 # put result in a new link at the front of the queue
303 last = root[PREV]
304 link = [last, root, key, result]
305 cache[key] = last[NEXT] = root[PREV] = link
306 currsize += 1
307 full = (currsize == maxsize)
308 misses += 1
309 return result
310 319
311 def cache_info(): 320 def cache_clear():
312 """Report cache statistics""" 321 """Clear the cache and cache statistics"""
313 with lock: 322 nonlocal hits, misses, currsize, full
314 return _CacheInfo(hits, misses, maxsize, currsize) 323 with lock:
324 cache.clear()
325 root[:] = [root, root, None, None]
326 hits = misses = currsize = 0
327 full = False
315 328
316 def cache_clear(): 329 wrapper.cache_info = cache_info
317 """Clear the cache and cache statistics""" 330 wrapper.cache_clear = cache_clear
318 nonlocal hits, misses, currsize, full 331 return update_wrapper(wrapper, user_function)
319 with lock: 332 else:
320 cache.clear() 333 def decorating_function(user_function):
321 root[:] = [root, root, None, None] 334 wrapper = c_lru_cache(user_function, maxsize, typed, _CacheInfo)
322 hits = misses = currsize = 0 335 return update_wrapper(wrapper, user_function)
323 full = False
324
325 wrapper.cache_info = cache_info
326 wrapper.cache_clear = cache_clear
327 return update_wrapper(wrapper, user_function)
328 336
329 return decorating_function 337 return decorating_function
OLDNEW
« no previous file with comments | « no previous file | Lib/test/test_functools.py » ('j') | Lib/test/test_functools.py » ('J')

RSS Feeds Recent Issues | This issue
This is Rietveld 894c83f36cb7+