diff --git a/Lib/functools.py b/Lib/functools.py
--- a/Lib/functools.py
+++ b/Lib/functools.py
@@ -574,14 +574,16 @@
                     last = root[PREV]
                     link = [last, root, key, result]
                     last[NEXT] = root[PREV] = cache[key] = link
-                    full = (len(cache) >= maxsize)
+                    # Use the __len__() method instead of the len() function
+                    # which could potentially be wrapped in an lru_cache itself.
+                    full = (cache.__len__() >= maxsize)
                 misses += 1
             return result

     def cache_info():
         """Report cache statistics"""
         with lock:
-            return _CacheInfo(hits, misses, maxsize, len(cache))
+            return _CacheInfo(hits, misses, maxsize, cache.__len__())

     def cache_clear():
         """Clear the cache and cache statistics"""
diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py
--- a/Lib/test/test_functools.py
+++ b/Lib/test/test_functools.py
@@ -1,4 +1,5 @@
 import abc
+import builtins
 import collections
 import copy
 from itertools import permutations
@@ -1189,6 +1190,18 @@
         self.assertEqual(misses, 4)
         self.assertEqual(currsize, 2)

+    def test_lru_reentrancy_with_len(self):
+        # Test to make sure the LRU cache code isn't thrown-off by
+        # caching the built-in len() function. Since len() can be
+        # cached, we shouldn't use it inside the lru code itself.
+        old_len = builtins.len
+        try:
+            builtins.len = self.module.lru_cache(4)(len)
+            for i in [0, 0, 1, 2, 3, 3, 4, 5, 6, 1, 7, 2, 1]:
+                self.assertEqual(len('abcdefghijklmn'[:i]), i)
+        finally:
+            builtins.len = old_len
+
     def test_lru_type_error(self):
         # Regression test for issue #28653.
         # lru_cache was leaking when one of the arguments
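
Not part of the patch above, but a minimal sketch of why the change matters: inside the cache bookkeeping, the name len is resolved through builtins at call time, so once a user wraps the builtin len() in lru_cache (exactly what the new test does), the old len(cache) call hands the cached len() an unhashable dict, whereas cache.__len__() avoids the name lookup entirely. The helper names is_full_via_len and is_full_via_dunder are invented for this illustration.

    # Illustrative sketch only -- not the stdlib code.  It mimics the
    # "is the cache full?" bookkeeping with and without the len() builtin.
    import builtins
    from functools import lru_cache

    cache = {}        # stand-in for an lru_cache's internal dictionary
    maxsize = 4

    def is_full_via_len():
        return len(cache) >= maxsize        # 'len' looked up in builtins at call time

    def is_full_via_dunder():
        return cache.__len__() >= maxsize   # no name lookup, immune to a cached len()

    old_len = builtins.len
    try:
        builtins.len = lru_cache(4)(len)    # the scenario exercised by the new test
        cache['spam'] = 1
        print(is_full_via_dunder())         # False: works regardless of what len() is
        try:
            is_full_via_len()               # cached len() tries to hash the dict as a key
        except TypeError as exc:
            print('bookkeeping via len() broke:', exc)
    finally:
        builtins.len = old_len

Run as written, the __len__() path should simply print False, while the len() path fails with "unhashable type: 'dict'" once the builtin has been replaced by a cached wrapper.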