1"""`functools.lru_cache` compatible memoizing function decorators."""
2
3__all__ = ("fifo_cache", "lfu_cache", "lru_cache", "mru_cache", "rr_cache", "ttl_cache")
4
5import collections
6import functools
7import math
8import random
9import time
10
11try:
12    from threading import RLock
13except ImportError:  # pragma: no cover
14    from dummy_threading import RLock
15
16from . import FIFOCache, LFUCache, LRUCache, MRUCache, RRCache, TTLCache
17from . import keys
18
19
20_CacheInfo = collections.namedtuple(
21    "CacheInfo", ["hits", "misses", "maxsize", "currsize"]
22)
23
24
25class _UnboundCache(dict):
26    @property
27    def maxsize(self):
28        return None
29
30    @property
31    def currsize(self):
32        return len(self)
33
34
class _UnboundTTLCache(TTLCache):
    """TTL cache whose size is limited only by item expiration."""

    def __init__(self, ttl, timer):
        # math.inf as the size bound: entries are evicted by TTL only.
        super().__init__(math.inf, ttl, timer)

    @property
    def maxsize(self):
        """Always ``None`` — no size bound is reported."""
        return None
42
43
def _cache(cache, typed):
    """Return a decorator that memoizes a function's results in *cache*.

    *cache* must be a mapping with ``maxsize``/``currsize`` properties
    (one of the cachetools cache classes or ``_UnboundCache``).  If
    *typed* is true, arguments of different types are cached separately,
    mirroring ``functools.lru_cache``.
    """
    # Captured at decoration time for cache_parameters(), matching the
    # functools.lru_cache API.
    maxsize = cache.maxsize

    def decorator(func):
        key = keys.typedkey if typed else keys.hashkey
        lock = RLock()
        stats = [0, 0]  # [hits, misses]

        def wrapper(*args, **kwargs):
            k = key(*args, **kwargs)
            with lock:
                try:
                    v = cache[k]
                    stats[0] += 1
                    return v
                except KeyError:
                    stats[1] += 1
            # The wrapped function runs *outside* the lock so a slow or
            # reentrant call does not block other threads; this means two
            # threads may compute the same value concurrently.
            v = func(*args, **kwargs)
            # in case of a race, prefer the item already in the cache
            try:
                with lock:
                    return cache.setdefault(k, v)
            except ValueError:
                return v  # value too large

        def cache_info():
            # Snapshot counters and sizes atomically under the lock.
            with lock:
                hits, misses = stats
                maxsize = cache.maxsize
                currsize = cache.currsize
            return _CacheInfo(hits, misses, maxsize, currsize)

        def cache_clear():
            with lock:
                try:
                    cache.clear()
                finally:
                    # Reset statistics even if clear() raises.
                    stats[:] = [0, 0]

        wrapper.cache_info = cache_info
        wrapper.cache_clear = cache_clear
        wrapper.cache_parameters = lambda: {"maxsize": maxsize, "typed": typed}
        functools.update_wrapper(wrapper, func)
        return wrapper

    return decorator
90
91
def fifo_cache(maxsize=128, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a First In First Out (FIFO)
    algorithm.

    """
    # Bare-decorator form (``@fifo_cache``): the function arrives as
    # ``maxsize``; decorate it with the default bound of 128.
    if callable(maxsize):
        return _cache(FIFOCache(128), typed)(maxsize)
    if maxsize is None:
        return _cache(_UnboundCache(), typed)
    return _cache(FIFOCache(maxsize), typed)
104
105
def lfu_cache(maxsize=128, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Frequently Used (LFU)
    algorithm.

    """
    # Bare-decorator form (``@lfu_cache``): the function arrives as
    # ``maxsize``; decorate it with the default bound of 128.
    if callable(maxsize):
        return _cache(LFUCache(128), typed)(maxsize)
    if maxsize is None:
        return _cache(_UnboundCache(), typed)
    return _cache(LFUCache(maxsize), typed)
118
119
def lru_cache(maxsize=128, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Recently Used (LRU)
    algorithm.

    """
    # Bare-decorator form (``@lru_cache``): the function arrives as
    # ``maxsize``; decorate it with the default bound of 128.
    if callable(maxsize):
        return _cache(LRUCache(128), typed)(maxsize)
    if maxsize is None:
        return _cache(_UnboundCache(), typed)
    return _cache(LRUCache(maxsize), typed)
132
133
def mru_cache(maxsize=128, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Most Recently Used (MRU)
    algorithm.
    """
    # Bare-decorator form (``@mru_cache``): the function arrives as
    # ``maxsize``; decorate it with the default bound of 128.
    if callable(maxsize):
        return _cache(MRUCache(128), typed)(maxsize)
    if maxsize is None:
        return _cache(_UnboundCache(), typed)
    return _cache(MRUCache(maxsize), typed)
145
146
def rr_cache(maxsize=128, choice=random.choice, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Random Replacement (RR)
    algorithm.

    """
    # Bare-decorator form (``@rr_cache``): the function arrives as
    # ``maxsize``; decorate it with the default bound of 128.
    if callable(maxsize):
        return _cache(RRCache(128, choice), typed)(maxsize)
    if maxsize is None:
        return _cache(_UnboundCache(), typed)
    return _cache(RRCache(maxsize, choice), typed)
159
160
def ttl_cache(maxsize=128, ttl=600, timer=time.monotonic, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Recently Used (LRU)
    algorithm with a per-item time-to-live (TTL) value.
    """
    # Bare-decorator form (``@ttl_cache``): the function arrives as
    # ``maxsize``; decorate it with the default bound of 128.
    if callable(maxsize):
        return _cache(TTLCache(128, ttl, timer), typed)(maxsize)
    if maxsize is None:
        # Size-unbounded, but items still expire after ``ttl`` seconds.
        return _cache(_UnboundTTLCache(ttl, timer), typed)
    return _cache(TTLCache(maxsize, ttl, timer), typed)
172