Here is a simple and efficient LRU cache, written in plain Python, that runs on any Python from version 1.5.2 onward:
class LRU_Cache:

    def __init__(self, original_function, maxsize=1000):
        # Remember the wrapped function and the capacity; `mapping` points
        # each key at its link in a doubly linked list bounded by two
        # sentinel nodes (head = oldest end, tail = newest end).
        self.original_function = original_function
        self.maxsize = maxsize
        self.mapping = {}

        PREV, NEXT, KEY, VALUE = 0, 1, 2, 3          # link fields
        self.head = [None, None, None, None]         # oldest
        self.tail = [self.head, None, None, None]    # newest
        self.head[NEXT] = self.tail

    def __call__(self, *key):
        PREV, NEXT = 0, 1
        mapping, head, tail = self.mapping, self.head, self.tail

        link = mapping.get(key, head)
        if link is head:
            # Cache miss: compute the value and, if the cache is already
            # full, evict the oldest entry (the link right after head).
            value = self.original_function(*key)
            if len(mapping) >= self.maxsize:
                old_prev, old_next, old_key, old_value = head[NEXT]
                head[NEXT] = old_next
                old_next[PREV] = head
                del mapping[old_key]
            # Insert the new link just before tail, i.e. at the newest end.
            last = tail[PREV]
            link = [last, tail, key, value]
            mapping[key] = last[NEXT] = tail[PREV] = link
        else:
            # Cache hit: unlink the entry from its current position and
            # move it to the newest end of the list.
            link_prev, link_next, key, value = link
            link_prev[NEXT] = link_next
            link_next[PREV] = link_prev
            last = tail[PREV]
            last[NEXT] = tail[PREV] = link
            link[PREV] = last
            link[NEXT] = tail
        return value
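
# A small illustrative check (hypothetical helper, not part of the recipe
# above): repeated calls with the same arguments are answered from the
# cache, so the wrapped function runs only once per distinct argument tuple.
call_count = 0

def slow_square(x):
    global call_count
    call_count += 1
    return x * x

cached_square = LRU_Cache(slow_square, maxsize=100)
cached_square(12)                 # miss: slow_square actually runs
cached_square(12)                 # hit: the stored value is returned
assert call_count == 1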

if __name__ == '__main__':
    p = LRU_Cache(pow, maxsize=3)
    for i in [1, 2, 3, 4, 5, 3, 1, 5, 1, 1]:
        print(i, p(i, 2))
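    # Hypothetical sanity check (not part of the original demo): with
    # maxsize=3, only the three most recently used keys survive the loop.
    print(sorted(p.mapping))      # -> [(1, 2), (3, 2), (5, 2)]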