Rietveld Code Review Tool

Diff: Lib/functools.py

Issue 14373: C implementation of functools.lru_cache
Patch Set: Created 7 years, 3 months ago
Other files in this patch set: Lib/test/test_functools.py, Modules/_functoolsmodule.c
1 """functools.py - Tools for working with functions and callable objects 1 """functools.py - Tools for working with functions and callable objects
2 """ 2 """
3 # Python module wrapper for _functools C module 3 # Python module wrapper for _functools C module
4 # to allow utilities written in Python to be added 4 # to allow utilities written in Python to be added
5 # to the functools module. 5 # to the functools module.
6 # Written by Nick Coghlan <ncoghlan at gmail.com> 6 # Written by Nick Coghlan <ncoghlan at gmail.com>
7 # and Raymond Hettinger <python at rcn.com> 7 # and Raymond Hettinger <python at rcn.com>
8 # Copyright (C) 2006-2010 Python Software Foundation. 8 # Copyright (C) 2006-2010 Python Software Foundation.
9 # See C source code for _functools credits/copyright 9 # See C source code for _functools credits/copyright
10 10
11 __all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES', 11 __all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES',
12 'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', 'partial'] 12 'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', 'partial']
13 13
14 from _functools import partial, reduce 14 from _functools import partial, reduce, c_lru_cache
    [comment] storchaka 2012/11/24 15:02:52: Here is a merge conflict with current code. Please
 from collections import namedtuple
 try:
     from _thread import allocate_lock as Lock
 except:
     from _dummy_thread import allocate_lock as Lock


 ################################################################################
 ### update_wrapper() and wraps() decorator
 ################################################################################
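The only functional change in the hunk above is importing c_lru_cache from the C extension. As a hypothetical variation that is not part of this patch, the accelerator import could be guarded in the same style as the _thread/_dummy_thread fallback shown above, so the module would still import on a build whose _functools does not provide c_lru_cache:

    # Hypothetical sketch, not in the patch: tolerate a _functools without c_lru_cache.
    from _functools import partial, reduce
    try:
        from _functools import c_lru_cache
    except ImportError:
        c_lru_cache = None   # a pure-Python wrapper would then be needed as a fallback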
(...skipping 130 matching lines...)
     Arguments to the cached function must be hashable.

     View the cache statistics named tuple (hits, misses, maxsize, currsize)
     with f.cache_info(). Clear the cache and statistics with f.cache_clear().
     Access the underlying function with f.__wrapped__.

     See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used

     """

-    # Users should only access the lru_cache through its public API:
-    #       cache_info, cache_clear, and f.__wrapped__
-    # The internals of the lru_cache are encapsulated for thread safety and
-    # to allow the implementation to change (including a possible C version).
-
     def decorating_function(user_function):
-
-        cache = {}
-        hits = misses = 0
-        kwd_mark = (object(),)          # separate positional and keyword args
-        cache_get = cache.get           # bound method to lookup a key or return None
-        sentinel = object()             # unique object used with cache_get
-        _len = len                      # localize the global len() function
-        lock = Lock()                   # because linkedlist updates aren't threadsafe
-        root = []                       # root of the circular doubly linked list
-        root[:] = [root, root, None, None]      # initialize by pointing to self
-        PREV, NEXT, KEY, RESULT = 0, 1, 2, 3    # names for the link fields
-
-        def make_key(args, kwds, typed, tuple=tuple, sorted=sorted, type=type):
-            # build a cache key from positional and keyword args
-            key = args
-            if kwds:
-                sorted_items = tuple(sorted(kwds.items()))
-                key += kwd_mark + sorted_items
-            if typed:
-                key += tuple(type(v) for v in args)
-                if kwds:
-                    key += tuple(type(v) for k, v in sorted_items)
-            return key
-
-        if maxsize == 0:
-
-            def wrapper(*args, **kwds):
-                # no caching, just a statistics update after a successful call
-                nonlocal misses
-                result = user_function(*args, **kwds)
-                misses += 1
-                return result
-
-        elif maxsize is None:
-
-            def wrapper(*args, **kwds):
-                # simple caching without ordering or size limit
-                nonlocal hits, misses
-                key = make_key(args, kwds, typed) if kwds or typed else args
-                result = cache_get(key, sentinel)
-                if result is not sentinel:
-                    hits += 1
-                    return result
-                result = user_function(*args, **kwds)
-                cache[key] = result
-                misses += 1
-                return result
-
-        else:
-
-            def wrapper(*args, **kwds):
-                # size limited caching that tracks accesses by recency
-                nonlocal root, hits, misses
-                key = make_key(args, kwds, typed) if kwds or typed else args
-                with lock:
-                    link = cache_get(key)
-                    if link is not None:
-                        # move the link to the front of the circular queue
-                        link_prev, link_next, key, result = link
-                        link_prev[NEXT] = link_next
-                        link_next[PREV] = link_prev
-                        last = root[PREV]
-                        last[NEXT] = root[PREV] = link
-                        link[PREV] = last
-                        link[NEXT] = root
-                        hits += 1
-                        return result
-                result = user_function(*args, **kwds)
-                with lock:
-                    if _len(cache) < maxsize:
-                        # put result in a new link at the front of the queue
-                        last = root[PREV]
-                        link = [last, root, key, result]
-                        cache[key] = last[NEXT] = root[PREV] = link
-                    else:
-                        # use root to store the new key and result
-                        root[KEY] = key
-                        root[RESULT] = result
-                        cache[key] = root
-                        # empty the oldest link and make it the new root
-                        root = root[NEXT]
-                        del cache[root[KEY]]
-                        root[KEY] = root[RESULT] = None
-                    misses += 1
-                return result
-
-        def cache_info():
-            """Report cache statistics"""
-            with lock:
-                return _CacheInfo(hits, misses, maxsize, len(cache))
-
-        def cache_clear():
-            """Clear the cache and cache statistics"""
-            nonlocal hits, misses
-            with lock:
-                cache.clear()
-                root[:] = [root, root, None, None]
-                hits = misses = 0
-
-        wrapper.cache_info = cache_info
-        wrapper.cache_clear = cache_clear
+        wrapper = c_lru_cache(user_function, maxsize, typed, _CacheInfo)
         return update_wrapper(wrapper, user_function)

     return decorating_function
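As the docstring kept in the diff states, the decorator's public surface is unchanged by this patch: callers still use cache_info(), cache_clear(), and __wrapped__, whether the wrapper comes from the removed pure-Python code or from c_lru_cache. A minimal usage sketch of that API; the fib function and the maxsize value are illustrative, not taken from the patch:

    import functools

    @functools.lru_cache(maxsize=32)
    def fib(n):
        # deliberately naive recursion; the cache memoizes the repeated sub-calls
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    fib(20)
    print(fib.cache_info())    # CacheInfo(hits=..., misses=..., maxsize=32, currsize=...)
    fib.cache_clear()          # clear the cache and reset the statistics
    fib.__wrapped__(5)         # call the undecorated function, bypassing the cache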
