## Preface

LRU is short for Least Recently Used, literally "the data used least recently". It is commonly used to implement cache eviction policies: cache memory is precious, so entries have to be removed according to some rule to keep the cache from filling up.

## Implementation

```cpp
#ifndef _LRU_CACHE_H_
#define _LRU_CACHE_H_

#include <cstddef>
#include <unordered_map>

/*
 * LRU (Least Recently Used) is a common cache eviction policy.
 * A doubly linked list keeps entries ordered from most to least recently
 * used, and an unordered_map maps each key to its list node.
 */
template <typename Key, typename Value>
class LRUCache {
private:
    struct Node {
        Key key;
        Value value;
        Node* prev;
        Node* next;
        Node(Key k, Value v)
            : key(k)
            , value(v)
            , prev(nullptr)
            , next(nullptr)
        {
        }
    };

public:
    explicit LRUCache(size_t capacity)
        : capacity_(capacity)
        , head_(nullptr)
        , tail_(nullptr)
    {
    }

    ~LRUCache()
    {
        while (head_ != nullptr) {
            Node* next = head_->next;
            delete head_;
            head_ = next;
        }
    }

    /*
     * @brief Look up a cached value.
     * Returns nullptr if the key is not present. Otherwise the node is
     * unlinked from the list, re-inserted at the head (most recently used
     * position), and a pointer to its value is returned.
     */
    Value* get(Key key)
    {
        auto it = datas_.find(key);
        if (it == datas_.end()) {
            return nullptr;
        } else {
            Node* node = it->second;
            remove(node);
            appendHead(node);
            return &node->value;
        }
    }

    /*
     * @brief Insert or update a key/value pair.
     * If the key already exists, update its value and move the node to the
     * head. Otherwise create a new node; if the cache is below capacity,
     * simply add the node to the head, else first erase the tail node
     * (least recently used) from the hash map, unlink and delete it from
     * the list, and then add the new node to the head and the hash map.
     */
    void put(Key key, Value value)
    {
        auto it = datas_.find(key);
        if (it != datas_.end()) {
            Node* node = it->second;
            node->value = value;
            remove(node);
            appendHead(node);
        } else {
            Node* node = new Node(key, value);
            if (datas_.size() < capacity_) {
                appendHead(node);
                datas_[key] = node;
            } else {
                Node* delNode = tail_;
                datas_.erase(delNode->key);
                remove(delNode);
                delete delNode;
                appendHead(node);
                datas_[key] = node;
            }
        }
    }

private:
    /*
     * Link a node at the head of the doubly linked list.
     */
    void appendHead(Node* node)
    {
        if (head_ == nullptr) {
            head_ = tail_ = node;
        } else {
            node->next = head_;
            head_->prev = node;
            head_ = node;
        }
    }

    /*
     * Unlink a node from the doubly linked list without destroying it,
     * so the helper can be shared by get() and put().
     */
    void remove(Node* node)
    {
        if (head_ == tail_) {
            head_ = tail_ = nullptr;
        } else if (head_ == node) {
            head_ = node->next;
            head_->prev = nullptr;
        } else if (tail_ == node) {
            tail_ = node->prev;
            tail_->next = nullptr;
        } else {
            node->prev->next = node->next;
            node->next->prev = node->prev;
        }
        node->next = nullptr;
        node->prev = nullptr;
    }

private:
    size_t capacity_;                       // cache capacity
    Node* head_;                            // head of the list (most recently used)
    Node* tail_;                            // tail of the list (least recently used)
    std::unordered_map<Key, Node*> datas_;  // key -> list node
};

#endif
```

## A simple implementation using STL list

```cpp
#include <list>
#include <unordered_map>

// Values live directly in the hash map; the list only records recency order.
// Note that list::remove scans the whole list, so get() and put() are O(n) here.
class LRUCache {
public:
    explicit LRUCache(int capacity)
        : capacity_(capacity)
    {
    }

    int get(int key)
    {
        auto it = table_.find(key);
        if (it == table_.end())
            return -1;
        lru_.remove(key);       // O(n) scan
        lru_.push_front(key);
        return it->second;
    }

    void put(int key, int value)
    {
        auto it = table_.find(key);
        if (it != table_.end()) {
            lru_.remove(key);
            lru_.push_front(key);
            it->second = value;
        } else {
            if (lru_.size() == static_cast<size_t>(capacity_)) {
                int victim = lru_.back();   // least recently used key
                lru_.pop_back();
                table_.erase(victim);
            }
            lru_.push_front(key);
            table_[key] = value;
        }
    }

private:
    int capacity_;
    std::list<int> lru_;                  // keys, most recently used at the front
    std::unordered_map<int, int> table_;  // key -> value
};
```
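As a quick sanity check, here is a minimal usage sketch of the list-based cache above. The capacity, keys, and values are made up for illustration, and it assumes the `LRUCache` class from the previous block is defined in the same translation unit:

```cpp
#include <iostream>

int main()
{
    LRUCache cache(2);                  // capacity of 2 entries
    cache.put(1, 100);
    cache.put(2, 200);
    std::cout << cache.get(1) << '\n';  // prints 100; key 1 is now most recently used
    cache.put(3, 300);                  // cache is full, so key 2 (LRU) is evicted
    std::cout << cache.get(2) << '\n';  // prints -1 (miss)
    std::cout << cache.get(3) << '\n';  // prints 300
    return 0;
}
```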
A third-round interview at Futu (富途) asked for an LRU cache with both set and get in O(1):

```cpp
#include <list>
#include <unordered_map>
#include <utility>

// Both put and get run in O(1): the hash map stores an iterator into the
// list, so an entry can be moved to the front or erased without scanning.
class LRU {
public:
    explicit LRU(int cap)
        : capacity_(cap)
    {
    }

    void put(int key, int val)
    {
        auto it = table_.find(key);
        if (it != table_.end()) {
            lru_.erase(it->second);         // drop the old position
            lru_.push_front({key, val});
            it->second = lru_.begin();
        } else {
            if (lru_.size() == static_cast<size_t>(capacity_)) {
                auto victim = lru_.back();  // least recently used entry
                table_.erase(victim.first);
                lru_.pop_back();
            }
            lru_.push_front({key, val});
            table_[key] = lru_.begin();
        }
    }

    int* get(int key)
    {
        auto it = table_.find(key);
        if (it == table_.end())
            return nullptr;
        lru_.push_front(*it->second);       // copy the entry to the front ...
        lru_.erase(it->second);             // ... then remove the old node
        table_[key] = lru_.begin();
        return &lru_.begin()->second;
    }

private:
    int capacity_;
    std::list<std::pair<int, int>> lru_;                                        // (key, value), most recently used at the front
    std::unordered_map<int, std::list<std::pair<int, int>>::iterator> table_;  // key -> position in lru_
};
```
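For completeness, a small usage sketch of the O(1) version (keys, values, and capacity are made up, and it assumes the `LRU` class above is defined in the same translation unit):

```cpp
#include <iostream>

int main()
{
    LRU cache(2);                 // capacity of 2 entries
    cache.put(1, 10);
    cache.put(2, 20);
    if (int* v = cache.get(1))    // touching key 1 makes it most recently used
        std::cout << *v << '\n';  // prints 10
    cache.put(3, 30);             // cache is full, so key 2 (LRU) is evicted
    std::cout << (cache.get(2) ? "hit" : "miss") << '\n';  // prints miss
    if (int* v = cache.get(3))
        std::cout << *v << '\n';  // prints 30
    return 0;
}
```

Keeping the list iterator inside the hash map is what makes both operations constant time: erasing or re-linking a known position in a std::list is O(1), and std::list iterators to other elements remain valid across insertions and erasures.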