Leetcode: LRU cache
Given the following code skeleton, implement an LRU cache with a variable capacity. The time complexity of both get and put should be O(1).
class LRUCache:
    def __init__(self, capacity: int):
        pass

    def get(self, key: int) -> int:
        pass

    def put(self, key: int, value: int) -> None:
        pass
A natural first building block is a dictionary (hash map): for put we store the value in the map at the key position. This operation is O(1). If the key already exists, we simply update its value.
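As a minimal sketch (a plain dict, not yet the solution), this is all the map alone gives us:

cache = {}
# Both operations are O(1) on average for a hash map.
cache[1] = "a"            # put
value = cache.get(1, -1)  # get, returning -1 on a miss
# A plain dict remembers insertion order (Python 3.7+), but it does not
# track *access* order, so it cannot tell us which key to evict.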
This map unfortunately has no information about when an item was added or last used. For this reason we need another data structure that lets us load, move, insert and delete an item in O(1). A doubly linked list allows us to do exactly this: we store a reference to the list node in the map, so we can unlink the node with a few pointer operations, append it to the end, and we also have constant-time access to the beginning of the list. Unfortunately, Python does not have a doubly linked list built in, so we need to create it ourselves.
I chose to create dummy head and tail elements to be able to skip several if/else checks. The solution then looks as follows:
class Node:
    def __init__(self, k, v, p=None, n=None) -> None:
        self.k = k  # key
        self.v = v  # value
        self.p = p  # previous node
        self.n = n  # next node


class DoubleLinkedList:
    def __init__(self):
        # Head and tail are dummy elements
        self._head = Node(None, None, None, None)
        self._tail = Node(None, None, self._head, None)
        self._head.n = self._tail

    def add(self, node):
        # Insert the node right before the dummy tail
        self._tail.p.n = node
        node.p = self._tail.p
        node.n = self._tail
        self._tail.p = node

    def remove(self, node):
        # Unlink the node from its neighbors
        node.p.n = node.n
        node.n.p = node.p

    def pop(self):
        # Remove and return the node right after the dummy head
        if self._head.n == self._tail:
            return None
        n = self._head.n
        self._head.n = self._head.n.n
        self._head.n.p = self._head
        return n

    def move_to_end(self, node):
        # Unlink the node, then re-insert it right before the dummy tail
        node.p.n = node.n
        node.n.p = node.p
        node.n = self._tail
        node.p = self._tail.p
        node.p.n = node
        self._tail.p = node
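Before wiring this into the cache, a quick sanity check (my own test, not part of the original solution) confirms that pop returns the oldest node unless it was moved to the end:

dll = DoubleLinkedList()
a, b = Node(1, "a"), Node(2, "b")
dll.add(a)
dll.add(b)
dll.move_to_end(a)   # a is now the most recently used
print(dll.pop().k)   # 2 -- b is the oldest and would be evicted first
print(dll.pop().k)   # 1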
class LRUCache:
    def __init__(self, capacity: int):
        self._cap = capacity
        self._cache = {}  # key -> Node, for O(1) lookup
        self._lru = DoubleLinkedList()  # oldest at the front, newest at the back

    def get(self, key: int) -> int:
        if key not in self._cache:
            return -1
        n = self._cache[key]
        self._lru.move_to_end(n)  # mark as most recently used
        return n.v

    def put(self, key: int, value: int) -> None:
        if key in self._cache:
            n = self._cache[key]
            n.v = value
            self._lru.move_to_end(n)
        else:
            self._cache[key] = Node(key, value)
            self._lru.add(self._cache[key])
            if len(self._cache) > self._cap:
                # Evict the least recently used item
                n = self._lru.pop()
                del self._cache[n.k]
lru = LRUCache(2)
lru.put(1, 2)
lru.put(2, 2)
print(lru.get(1))  # 2
lru.put(3, 2)      # evicts key 2, the least recently used
print(lru.get(2))  # -1
print(lru.get(3))  # 2
Runtime: 949 ms, faster than 84.93% of Python3 online submissions for LRU Cache. Memory Usage: 75 MB, less than 82.05% of Python3 online submissions for LRU Cache.
Using OrderedDict
A much simpler way to solve this in Python is to use a collections.OrderedDict. Under the hood, this dictionary keeps the order of the inserted keys via a doubly linked list, and it exposes exactly the operations we built by hand: move_to_end and popitem. The implementation looks as follows:
from collections import OrderedDict


class LRUCache:
    def __init__(self, capacity: int):
        self._store = OrderedDict()
        self._cap = capacity

    def get(self, key: int) -> int:
        if key not in self._store:
            return -1
        self._store.move_to_end(key)  # mark as most recently used
        return self._store[key]

    def put(self, key: int, value: int) -> None:
        self._store[key] = value
        self._store.move_to_end(key)
        if len(self._store) > self._cap:
            # last=False pops the oldest (least recently used) entry
            self._store.popitem(last=False)
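Running the same driver code as above (my own check, not part of the exercise) produces identical results with this version:

lru = LRUCache(2)
lru.put(1, 2)
lru.put(2, 2)
print(lru.get(1))  # 2
lru.put(3, 2)      # evicts key 2
print(lru.get(2))  # -1
print(lru.get(3))  # 2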