Implementing an LRU Cache with the STL

Following http://blog.csdn.net/l402398703/article/details/22012551, I wrote an LRUCache of my own, which noticeably deepened my understanding of how an LRU cache works.


cached_map: a hash_map used to look up the cached node for a user-supplied key.

head, tail: sentinels of a doubly linked list that holds all nodes currently in use; these are the same nodes stored in cached_map.

cached_entries: the pool of currently free nodes. When a new element is added and cached_entries still has a free node, one is taken from the pool and the new node is inserted at the head of the list.
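
head and tail act as sentinels, so the in-use list is always ordered from most to least recently used. Roughly (an illustrative sketch of the invariant, not code from the original post):

	head <-> most recently used <-> ... <-> least recently used <-> tail

On every put, and on every get hit, the touched node is detached and re-attached right after head; when the free pool is empty, the node just before tail is the one evicted.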


#include <stdio.h>
#include <stdlib.h>
#include <string>
#include <vector>
#include <pthread.h>
#include <assert.h>
// hash_map is a pre-C++11 GCC extension; std::unordered_map is the modern equivalent.
#include <ext/hash_map>

using namespace std;
using namespace __gnu_cxx;

// Doubly linked list node holding one cached key/value pair.
template <class KeyType, class DataType>
struct Node
{
	KeyType key;
	DataType data;
	struct Node * prev, * next;
};

template <class KeyType, class DataType>
class LRUCache
{
private:
	hash_map<KeyType, Node<KeyType, DataType> *> cached_map;   // key -> node lookup
	Node<KeyType, DataType> * head, * tail;                    // sentinels of the in-use (LRU-ordered) list

	vector<Node<KeyType, DataType> *> cached_entries;          // pool of currently free nodes
	Node<KeyType, DataType> * entries;                         // backing array for all nodes

	bool pthread_safe;
	pthread_mutex_t cached_mutex;

private:
	void cache_lock()
	{
		if(pthread_safe)
			pthread_mutex_lock(&cached_mutex); 
	}

	void cache_unlock()
	{
		if(pthread_safe)
			pthread_mutex_unlock(&cached_mutex); 
	}

	// Insert node right after head: it becomes the most recently used entry.
	void attach(Node<KeyType, DataType> * node)
	{
		node->prev = head;
		node->next = head->next;
		head->next->prev = node;
		head->next = node;
	}

	// Unlink node from the list without freeing it.
	void detach(Node<KeyType, DataType> * node)
	{
		node->prev->next = node->next;
		node->next->prev = node->prev;
	}
	
public:

	void put(KeyType Key, DataType data);
	DataType get(KeyType key);
	
	LRUCache(int size, bool is_pthread_safe = false)
	{
		int i = 0;
		
		pthread_safe = is_pthread_safe;
	// Guard against a non-positive size; default to 1024 entries.
	if(size <= 0)
		size = 1024;

	// Pre-allocate all nodes and place them in the free pool.
	entries = new Node<KeyType, DataType>[size];
	assert(entries != NULL);
	for(i = 0; i < size; i++)
		cached_entries.push_back(&entries[i]);

	// head and tail are sentinel nodes; cached entries live between them.
	head = new Node<KeyType, DataType>();
	tail = new Node<KeyType, DataType>();
		assert((head != NULL) && (tail != NULL));
		head->prev = NULL;
		tail->next = NULL;
		head->next = tail;
		tail->prev = head;

		if(pthread_safe)
			pthread_mutex_init(&cached_mutex, NULL); 
		
	}

	~LRUCache()
	{
		if(pthread_safe)
			pthread_mutex_destroy(&cached_mutex); 
		delete   head;
		delete   tail;
		delete[] entries;
	}	
};

template <class KeyType, class DataType>
void LRUCache<KeyType, DataType>::put(KeyType Key, DataType data)
{
	cache_lock();
	// operator[] value-initializes the mapped pointer to NULL for a missing key,
	// so a NULL node means the key is not cached yet.
	Node<KeyType, DataType> * node = cached_map[Key];
	if(NULL != node)
	{
		// Key already cached: unlink its node and re-attach it at the head below.
		detach(node);
	}
	else
	{
		if(cached_entries.empty())
		{
			// Cache is full: evict the least recently used node (the one before tail).
			node = tail->prev;
			detach(node);
			cached_map.erase(node->key);
		}
		else
		{
			// Take an unused node from the free pool.
			node = cached_entries.back();
			cached_entries.pop_back();
		}
	}
	node->data = data;
	node->key  = Key;
	cached_map[Key] = node;
	attach(node);
	cache_unlock();
}

template <class KeyType, class DataType>
DataType LRUCache<KeyType, DataType>::get(KeyType key)
{
	DataType data = DataType();
	cache_lock();
	// Use find() so that a miss does not insert an empty entry into the map.
	typename hash_map<KeyType, Node<KeyType, DataType> *>::iterator it = cached_map.find(key);
	if(it != cached_map.end())
	{
		// Hit: move the node to the head, making it the most recently used.
		Node<KeyType, DataType> * node = it->second;
		detach(node);
		attach(node);
		data = node->data;
	}
	cache_unlock();
	// A miss returns a default-constructed DataType.
	return data;
}
	
int main(int argc, char * argv[])
{
	int index = 0;
	LRUCache<int, int> cache(10);

	for(; index < 999999; index++)
		cache.put(index, index);
	
	return 0;
}
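
The main above only stress-tests put. As a quick, illustrative check of lookups and eviction (not part of the original post; it assumes the get shown above, which returns a default-constructed DataType on a miss), one could run something like:

	LRUCache<int, int> cache(10);
	for(int i = 0; i < 20; i++)
		cache.put(i, i * i);

	// Keys 0..9 have been evicted; only the 10 most recently inserted keys remain.
	printf("get(5)  = %d\n", cache.get(5));    // miss: prints 0
	printf("get(15) = %d\n", cache.get(15));   // hit:  prints 225

Since attach, detach, and the hash lookup are all O(1) (amortized for the hash), both put and get run in constant time.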


