Added LRU Cache by ruppysuppy · Pull Request #2138 · TheAlgorithms/Python · GitHub
[go: up one dir, main page]

Skip to content

Added LRU Cache #2138

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 4 commits into from
Jun 25, 2020
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Implemented suggestions
  • Loading branch information
ruppysuppy committed Jun 21, 2020
commit 71d366170a15f1c14d7c3cbfab44b904efd75507
70 changes: 41 additions & 29 deletions other/lru_cache.py
8000
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import Optional, Callable
from typing import Callable, Optional


class DoubleLinkedListNode:
Expand Down Expand Up @@ -44,12 +44,12 @@ def remove(self, node: DoubleLinkedListNode) -> DoubleLinkedListNode:
return node


class LruCache:
class LRUCache:
'''
LRU Cache to store a given capacity of data. Can be used as a stand-alone object
or as a function decorator.

>>> cache = LruCache(2)
>>> cache = LRUCache(2)

>>> cache.set(1, 1)

Expand All @@ -72,10 +72,10 @@ class LruCache:
>>> cache.get(4)
4

>>> cache.cache_info()
'CacheInfo(hits=3, misses=2, capacity=2, current size=2)'
>>> cache
CacheInfo(hits=3, misses=2, capacity=2, current size=2)

>>> @LruCache.decorator(100)
>>> @LRUCache.decorator(100)
... def fib(num):
... if num in (1, 2):
... return 1
Expand All @@ -85,7 +85,7 @@ class LruCache:
... res = fib(i)

>>> fib.cache_info()
'CacheInfo(hits=194, misses=99, capacity=100, current size=99)'
CacheInfo(hits=194, misses=99, capacity=100, current size=99)
'''

# class variable to map the decorator functions to their respective instance
Expand All @@ -99,6 +99,30 @@ def __init__(self, capacity: int):
self.miss = 0
self.cache = {}
Copy link
Member
@cclauss cclauss Jun 21, 2020

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why have both a cache and a decorator_function_to_instance_map? Could we have one instead of two?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We need both as the cache maps the keys to the Double Linked List Node


Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

def __contains__(key) -> bool:
    """
    >>> cache = LruCache(1)
    >>> 1 in cache
    False
    >>> set(1, 1)
    >>> 1 in cache
    True
    """
    return key in self.cache

Copy link
Member
@cclauss cclauss Jun 21, 2020

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

__contains__() is the modern version of has_key(). It enables the use of in.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

One of the coolest submissions ever!!

Thanks a lot :) 👍

def __repr__(self) -> str:
'''
Return the details for the cache instance
[hits, misses, capacity, current_size]
'''

return (f'CacheInfo(hits={self.hits}, misses={self.miss}, '
f'capacity={self.capacity}, current size={self.num_keys})')

def __contains__(self, key: int) -> bool:
'''
>>> cache = LRUCache(1)

>>> 1 in cache
False

>>> cache.set(1, 1)

>>> 1 in cache
True
'''

return key in self.cache

def get(self, key: int) -> Optional[int]:
'''
Returns the value for the input key and updates the Double Linked List. Returns
Expand Down Expand Up @@ -132,15 +156,6 @@ def set(self, key: int, value: int) -> None:
node.val = value
self.list.add(node)

def cache_info(self) -> str:
    '''
    Returns the details for the cache instance
    [hits, misses, capacity, current size]

    Returns:
        A single-line summary, e.g.
        'CacheInfo(hits=3, misses=2, capacity=2, current size=2)'
    '''

    # Parenthesized implicit string concatenation instead of a
    # backslash-continued f-string: the backslash form includes the
    # continuation line's leading whitespace in the returned string
    # whenever this method sits indented inside a class.
    return (
        f'CacheInfo(hits={self.hits}, misses={self.miss}, '
        f'capacity={self.capacity}, current size={self.num_keys})'
    )

@staticmethod
def decorator(size: int = 128):
'''
Expand All @@ -150,22 +165,19 @@ def decorator(size: int = 128):
def cache_decorator_inner(func: Callable):

def cache_decorator_wrapper(*args, **kwargs):
if func not in LruCache.decorator_function_to_instance_map:
LruCache.decorator_function_to_instance_map[func] = LruCache(size)

result = LruCache.decorator_function_to_instance_map[func].get(args[0])

if result is not None:
return result

result = func(*args, **kwargs)
LruCache.decorator_function_to_instance_map[func].set(args[0], result)
if func not in LRUCache.decorator_function_to_instance_map:
LRUCache.decorator_function_to_instance_map[func] = LRUCache(size)

result = LRUCache.decorator_function_to_instance_map[func].get(args[0])
if result is None:
result = func(*args, **kwargs)
LRUCache.decorator_function_to_instance_map[func].set(
args[0], result
)
return result

def cache_info():
if func not in LruCache.decorator_function_to_instance_map:
return "Cache for function not initialized"
return LruCache.decorator_function_to_instance_map[func].cache_info()
return LRUCache.decorator_function_to_instance_map[func]

cache_decorator_wrapper.cache_info = cache_info

Expand Down
0