Skip to content

Commit

Permalink
Improve handling of race conditions in "cachetools.func" decorators.
Browse files Browse the repository at this point in the history
  • Loading branch information
Thomas Kemmer authored and tkem committed Feb 20, 2025
1 parent c403f9f commit cb5f753
Show file tree
Hide file tree
Showing 2 changed files with 50 additions and 3 deletions.
47 changes: 47 additions & 0 deletions src/cachetools/_decorators.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,51 @@
"""Extensible memoizing decorator helpers."""


def _cached_cond_info(func, cache, key, cond, info):
    """Return a memoizing wrapper for *func* that serializes per-key
    computation with the condition variable *cond*.

    Unlike the plain-lock variant, concurrent callers with the same cache
    key do not compute the value redundantly: the first caller marks the
    key as pending and computes it, while later callers block on *cond*
    until the result is available (or the computation failed).

    The returned wrapper exposes ``cache_clear()`` and ``cache_info()``
    attributes; *info* builds the info object from the hit/miss counters.
    """
    # Hit/miss counters for cache_info(); only read/written while holding
    # *cond*, except for the unlocked increments noted below.
    hits = misses = 0
    # Keys whose values are currently being computed by some thread.
    pending = set()

    def wrapper(*args, **kwargs):
        nonlocal hits, misses
        k = key(*args, **kwargs)
        with cond:
            # Block while another thread is computing the value for this
            # key; it will notify_all() when done (success or failure).
            cond.wait_for(lambda: k not in pending)
            try:
                result = cache[k]
                hits += 1
                return result
            except KeyError:
                pass
            misses += 1
            # Mark the key as in flight so concurrent callers wait above
            # instead of starting a duplicate computation.
            pending.add(k)
        # Note: func() is deliberately called *without* holding cond, so
        # unrelated keys can be computed and served concurrently.
        try:
            v = func(*args, **kwargs)
            try:
                with cond:
                    cache[k] = v
            except ValueError:
                pass  # value too large
            return v
        finally:
            # Always clear the pending marker -- even if func() raised --
            # and wake every waiter so they can re-check the cache.
            with cond:
                pending.remove(k)
                cond.notify_all()

    def cache_clear():
        """Clear the cache and reset the hit/miss statistics."""
        nonlocal hits, misses
        with cond:
            cache.clear()
            hits = misses = 0

    def cache_info():
        """Return a snapshot of the cache statistics via *info*."""
        with cond:
            return info(hits, misses)

    wrapper.cache_clear = cache_clear
    wrapper.cache_info = cache_info
    return wrapper


def _cached_locked_info(func, cache, key, lock, info):
hits = misses = 0

Expand Down Expand Up @@ -139,6 +184,8 @@ def _cached_wrapper(func, cache, key, lock, info):
wrapper = _uncached_info(func, info)
elif lock is None:
wrapper = _cached_unlocked_info(func, cache, key, info)
elif hasattr(lock, "wait_for") and hasattr(lock, "notify_all"):
wrapper = _cached_cond_info(func, cache, key, lock, info)
else:
wrapper = _cached_locked_info(func, cache, key, lock, info)
else:
Expand Down
6 changes: 3 additions & 3 deletions src/cachetools/func.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,9 @@
import time

try:
from threading import RLock
from threading import Condition
except ImportError: # pragma: no cover
from dummy_threading import RLock
from dummy_threading import Condition

from . import FIFOCache, LFUCache, LRUCache, MRUCache, RRCache, TTLCache
from . import cached
Expand All @@ -28,7 +28,7 @@ def maxsize(self):
def _cache(cache, maxsize, typed):
def decorator(func):
key = keys.typedkey if typed else keys.hashkey
wrapper = cached(cache=cache, key=key, lock=RLock(), info=True)(func)
wrapper = cached(cache=cache, key=key, lock=Condition(), info=True)(func)
wrapper.cache_parameters = lambda: {"maxsize": maxsize, "typed": typed}
return wrapper

Expand Down

0 comments on commit cb5f753

Please sign in to comment.