| 1 | +""" |
| 2 | +@Author: Aseem Jain |
| 3 | +@Linkedin: https://www.linkedin.com/in/premaseem/ |
| 4 | +@Github: https://github.com/premaseem/pythonLab/tree/master/challenge |
| 5 | +
|
| 6 | +# What is Cache? |
| 7 | +A cache is like short-term memory: it has a limited amount of space, but is typically faster than the original data source and contains the most recently accessed items. Caches can exist at all levels in architecture, but are often found at the level nearest to the front end, where they are implemented to return data quickly without taxing downstream levels. |
| 8 | +
# Following are some of the most common cache eviction policies:
    First In First Out (FIFO): Evicts the block that was added first, without any regard to how often or how many times it was accessed before.
    Least Recently Used (LRU): Discards the least recently used items first. (This is the policy implemented below; a minimal OrderedDict-based sketch of the same idea follows this docstring.)
    Least Frequently Used (LFU): Counts how often an item is needed; those that are used least often are discarded first.
    Time To Live (TTL): Data remains in the cache for a fixed time (e.g. 300 seconds) and is deleted after that. The timer can be renewed upon usage if needed.
    Random Replacement (RR): Randomly selects a candidate item and discards it to make space when necessary.

"""
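
# The Cache class below keeps its own list-based LRU bookkeeping. As a point of
# comparison (not part of the original challenge), the same LRU policy can be
# expressed with the standard library's collections.OrderedDict:
# move_to_end() marks a key as most recently used and popitem(last=False)
# drops the least recently used one. A minimal sketch, assuming Python 3.x;
# the class name and placeholder value are illustrative only.
from collections import OrderedDict


class OrderedDictLRUCache:
    """Hypothetical alternative LRU cache built on collections.OrderedDict."""

    def __init__(self, capacity):
        self.capacity = capacity
        self.cache = OrderedDict()

    def get_data(self, key):
        if key in self.cache:
            # Mark as most recently used and report a hit.
            self.cache.move_to_end(key)
            return "hit"
        if len(self.cache) >= self.capacity:
            # Evict the least recently used entry (the front of the OrderedDict).
            self.cache.popitem(last=False)
        self.cache[key] = 1  # placeholder value, as in the class below
        return "miss"

# Example: with capacity 2, the oldest unused key is evicted first.
#   lru = OrderedDictLRUCache(2)
#   lru.get_data("a")  # "miss"
#   lru.get_data("b")  # "miss"
#   lru.get_data("a")  # "hit"  ("b" is now least recently used)
#   lru.get_data("c")  # "miss" (evicts "b")
#   lru.get_data("b")  # "miss"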

class Cache:

    def __init__(self, capacity):
        self.capacity = capacity
        self.cache = {}   # key -> cached value
        self.queue = []   # access order; front = least recently used

    def evict(self):
        """ Evict the least recently used item, i.e. the front of the queue. """
        front_element = self.queue.pop(0)
        print("evicting least recently used key:", front_element)
        if front_element in self.cache:
            self.cache.pop(front_element)

    def load_cache(self, x):
        # The new key is already in the queue (appended by rearrange_queue),
        # so evict the least recently used entry once capacity is exceeded.
        if len(self.queue) > self.capacity:
            self.evict()
        self.cache[x] = [1]  # placeholder value standing in for the real cached data

    def rearrange_queue(self, x):
        # Move x to the back of the queue, marking it as most recently used.
        if x in self.queue:
            self.queue.remove(x)
        self.queue.append(x)

    def get_data(self, x):
        self.rearrange_queue(x)

        if self.cache.get(x):
            return "hit"

        self.load_cache(x)
        return "miss"


test_data = [
    ("a", "miss"),
    ("a", "hit"),
    ("b", "miss"),
    ("b", "hit"),
    ("a", "hit"),
    ("c", "miss"),
    ("d", "miss"),
    ("e", "miss"),

    # up to 5 distinct keys (the capacity), nothing is evicted, so "a" still hits
    ("a", "hit"),
    ("f", "miss"),
    ("g", "miss"),

    # once over capacity, the least recently used keys ("b", then "c") were evicted,
    # so "c" misses and gets reloaded
    ("c", "miss"),

    # "c" was just reloaded, so it hits; "d" was evicted in the meantime
    ("c", "hit"),
    ("d", "miss"),
    ("z", "miss"),
    ("z", "hit"),
    ("b", "miss"),
    ("b", "hit"),
    ("d", "hit")
]

# Create a cache with capacity 5 and replay the access pattern above.
my_cache = Cache(5)
for given, expected in test_data:
    print("Cache ", my_cache.cache.keys())
    print("Queue", my_cache.queue)
    print(f"Testing with given = {given} and expected = {expected}")
    assert expected == my_cache.get_data(given)