146. LRU Cache
Solution
class Node:
    """Doubly-linked-list node holding one cache entry.

    Acts like a dict item (key/value pair) that additionally remembers
    its neighbors, enabling O(1) unlink/relink within the list.
    """

    def __init__(self, key=0, value=0):
        self.key, self.value = key, value
        # Links are wired up by the list owner; start detached.
        self.prev = self.next = None
class LRUCache:
    """Fixed-capacity LRU cache with O(1) get and put.

    A hash map (``self.cache``) gives O(1) lookup of nodes, while a
    doubly linked list ordered most- to least-recently-used (between two
    sentinel nodes) gives O(1) promotion and eviction.
    """

    def __init__(self, capacity: int):
        self.capacity = capacity
        # key -> Node: global fast access into the locally-linked list.
        self.cache = {}
        # Sentinel head/tail so insert/remove never special-case the ends.
        self.head = Node()
        self.tail = Node()
        self.head.next = self.tail
        self.tail.prev = self.head

    def get(self, key: int) -> int:
        """Return the value for *key*, or -1 if absent; marks key as MRU."""
        node = self.cache.get(key)
        if node is None:
            return -1
        # Promote to the most-recently-used slot.
        self.remove_node(node)
        self.add_to_head(node)
        return node.value

    def put(self, key: int, value: int) -> None:
        """Insert or overwrite *key*; evicts the LRU entry when over capacity."""
        node = self.cache.get(key)
        if node is not None:
            node.value = value  # remember to refresh the stored value
            self.remove_node(node)
            self.add_to_head(node)
            return
        node = Node(key, value)
        self.cache[key] = node
        self.add_to_head(node)
        if len(self.cache) > self.capacity:
            self.remove_tail()

    def remove_node(self, node):
        """Unlink *node* from the list (its own pointers are left stale)."""
        before, after = node.prev, node.next
        before.next = after
        after.prev = before

    def add_to_head(self, node):
        """Splice *node* in right after the sentinel head (the MRU slot)."""
        first = self.head.next
        node.prev, node.next = self.head, first
        first.prev = node
        self.head.next = node

    def remove_tail(self):
        """Evict the least-recently-used entry (just before the sentinel tail)."""
        victim = self.tail.prev
        del self.cache[victim.key]
        self.remove_node(victim)
Last updated