简介
LRU(Least recently used,最近最少使用)算法根据数据的历史访问记录来进行淘汰数据,其核心思想是“如果数据最近被访问过,那么将来被访问的几率也更高”。
1、新数据插入到链表头部;
2、每当缓存命中(即缓存数据被访问),则将数据移到链表头部;
3、当链表满的时候,将链表尾部的数据丢弃。
实现
#ifndef LRU_LRUCACHE_H_
#define LRU_LRUCACHE_H_
#include<iostream>
#include<unordered_map>
#include<list>
#include<utility>
#include<mutex>
using namespace std;
// Fixed-capacity LRU (least-recently-used) cache mapping int keys to int values.
// Recency order is kept in a doubly-linked list (front = most recent); the
// hash map gives O(1) lookup of each key's list node. All public operations
// take mutex_, so a single instance is safe to share across threads.
class LRUCache
{
public:
// capacity: maximum number of entries held before eviction.
LRUCache(int capacity);
~LRUCache();
// Returns the value for key and marks it most-recently-used,
// or -1 on a miss. NOTE(review): -1 is a sentinel, so a stored
// value of -1 is indistinguishable from a miss.
int Get(int key);
// Inserts or updates key -> value as most-recently-used,
// evicting the least-recently-used entry when full.
void Put(int key,int value);
private:
int capacity_;// maximum number of cached entries
mutex mutex_;// guards caches_ and caches_map_
list<pair<int, int>> caches_;// (key, value) nodes, most recent at front
// key -> iterator to its node in caches_ (list iterators stay valid across splice)
unordered_map< int, list<pair<int, int> > ::iterator> caches_map_;
};
#endif
#include"lrucache.h"
// Constructs an empty cache that holds at most `capacity` entries.
// Uses a member-initializer list instead of assignment in the body.
LRUCache::LRUCache(int capacity)
    : capacity_(capacity)
{
}
// All members clean up via their own destructors (Rule of Zero).
LRUCache::~LRUCache() = default;
// Returns the value for `key` and promotes it to most-recently-used,
// or -1 if the key is absent (-1 is a sentinel; a stored -1 looks like a miss).
int LRUCache::Get(int key)
{
    unique_lock<mutex> locker(mutex_);
    auto iter_map = caches_map_.find(key);
    if (iter_map == caches_map_.end())
    {
        return -1; // miss
    }
    // Move the node to the front in O(1) without copying it. splice() does
    // not invalidate list iterators, so the map entry remains correct and
    // does not need to be rewritten (unlike erase + push_front).
    caches_.splice(caches_.begin(), caches_, iter_map->second);
    return iter_map->second->second;
}
// Inserts or updates `key` -> `value` as the most-recently-used entry,
// evicting the least-recently-used entry when the cache is full.
void LRUCache::Put(int key,int value)
{
    unique_lock<mutex> locker(mutex_);
    auto iter_map = caches_map_.find(key);
    if (iter_map != caches_map_.end())
    {
        // Hit: update in place and promote the node in O(1). splice() keeps
        // list iterators valid, so the map entry needs no rewrite (the
        // original copied the node, erased it and re-pushed it).
        iter_map->second->second = value;
        caches_.splice(caches_.begin(), caches_, iter_map->second);
        return;
    }
    if (capacity_ <= 0)
    {
        // Degenerate cache stores nothing. The original compared
        // capacity_ == size() and would call back()/pop_back() on an
        // empty list (undefined behavior) when capacity_ was 0.
        return;
    }
    if (static_cast<int>(caches_.size()) >= capacity_)
    {
        // Evict the least-recently-used entry (list tail).
        // Cast avoids the signed/unsigned comparison; >= also caps growth
        // even if the invariant was somehow violated.
        caches_map_.erase(caches_.back().first); // erase by key: one lookup
        caches_.pop_back();
    }
    caches_.emplace_front(key, value); // construct the node in place
    caches_map_[key] = caches_.begin();
}