http://www.jiuzhang.com/solutions/lfu-cache/
https://github.com/chirino/hawtdb/blob/master/hawtdb/src/main/java/org/fusesource/hawtdb/util/LFUCache.java
Maintain minFrequencyKey
O(1) for all of the LFU cache operations, which include insertion, access and deletion (eviction)
It requires three data structures. One is a hash table which is used to cache the key/values so that given a key we can retrieve the cache entry at O(1). Second one is a double linked list for each frequency of access. The max frequency is capped at the cache size to avoid creating more and more frequency list entries. If we have a cache of max size 4 then we will end up with 4 different frequencies. Each frequency will have a double linked list to keep track of the cache entries belonging to that particular frequency.
The third data structure would be to somehow link these frequencies lists. It can be either an array or another linked list so that on accessing a cache entry it can be easily promoted to the next frequency list in time O(1). In our article it is based on array as traversing would be faster than linked list.
https://svn.apache.org/repos/asf/activemq/trunk/activemq-kahadb-store/src/main/java/org/apache/activemq/util/LFUCache.java
http://dhruvbird.com/lfu.pdf
http://techinpad.blogspot.com/2015/10/similar-to-lru-implement-lfu-which-is.html
https://github.com/chirino/hawtdb/blob/master/hawtdb/src/main/java/org/fusesource/hawtdb/util/LFUCache.java
public class LFUCache { private final Map<Integer, CacheNode> cache; private final LinkedHashSet[] frequencyList; private int lowestFrequency; private int maxFrequency; private final int maxCacheSize; // @param capacity, an integer public LFUCache(int capacity) { // Write your code here this.cache = new HashMap<Integer, CacheNode>(capacity); this.frequencyList = new LinkedHashSet[capacity * 2]; this.lowestFrequency = 0; this.maxFrequency = capacity * 2 - 1; this.maxCacheSize = capacity; initFrequencyList(); } // @param key, an integer // @param value, an integer // @return nothing public void set(int key, int value) { // Write your code here CacheNode currentNode = cache.get(key); if (currentNode == null) { if (cache.size() == maxCacheSize) { doEviction(); } LinkedHashSet<CacheNode> nodes = frequencyList[0]; currentNode = new CacheNode(key, value, 0); nodes.add(currentNode); cache.put(key, currentNode); lowestFrequency = 0; } else { currentNode.v = value; } addFrequency(currentNode); } public int get(int key) { // Write your code here CacheNode currentNode = cache.get(key); if (currentNode != null) { addFrequency(currentNode); return currentNode.v; } else { return -1; } } public void addFrequency(CacheNode currentNode) { int currentFrequency = currentNode.frequency; if (currentFrequency < maxFrequency) { int nextFrequency = currentFrequency + 1; LinkedHashSet<CacheNode> currentNodes = frequencyList[currentFrequency]; LinkedHashSet<CacheNode> newNodes = frequencyList[nextFrequency]; moveToNextFrequency(currentNode, nextFrequency, currentNodes, newNodes); cache.put(currentNode.k, currentNode); if (lowestFrequency == currentFrequency && currentNodes.isEmpty()) { lowestFrequency = nextFrequency; } } else { // Hybrid with LRU: put most recently accessed ahead of others: LinkedHashSet<CacheNode> nodes = frequencyList[currentFrequency]; nodes.remove(currentNode); nodes.add(currentNode); } } public int remove(int key) { CacheNode currentNode = cache.remove(key); if 
(currentNode != null) { LinkedHashSet<CacheNode> nodes = frequencyList[currentNode.frequency]; nodes.remove(currentNode); if (lowestFrequency == currentNode.frequency) { findNextLowestFrequency(); } return currentNode.v; } else { return -1; } } public int frequencyOf(int key) { CacheNode node = cache.get(key); if (node != null) { return node.frequency + 1; } else { return 0; } } public void clear() { for (int i = 0; i <= maxFrequency; i++) { frequencyList[i].clear(); } cache.clear(); lowestFrequency = 0; } public int size() { return cache.size(); } public boolean isEmpty() { return this.cache.isEmpty(); } public boolean containsKey(int key) { return this.cache.containsKey(key); } private void initFrequencyList() { for (int i = 0; i <= maxFrequency; i++) { frequencyList[i] = new LinkedHashSet<CacheNode>(); } } private void doEviction() { int currentlyDeleted = 0; double target = 1; // just one while (currentlyDeleted < target) { LinkedHashSet<CacheNode> nodes = frequencyList[lowestFrequency]; if (nodes.isEmpty()) { continue; } else { Iterator<CacheNode> it = nodes.iterator(); while (it.hasNext() && currentlyDeleted++ < target) { CacheNode node = it.next(); it.remove(); cache.remove(node.k); } if (!it.hasNext()) { findNextLowestFrequency(); } } } } private void moveToNextFrequency(CacheNode currentNode, int nextFrequency, LinkedHashSet<CacheNode> currentNodes, LinkedHashSet<CacheNode> newNodes) { currentNodes.remove(currentNode); newNodes.add(currentNode); currentNode.frequency = nextFrequency; } private void findNextLowestFrequency() { while (lowestFrequency <= maxFrequency && frequencyList[lowestFrequency].isEmpty()) { lowestFrequency++; } if (lowestFrequency > maxFrequency) { lowestFrequency = 0; } } private class CacheNode { public final int k; public int v; public int frequency; public CacheNode(int k, int v, int frequency) { this.k = k; this.v = v; this.frequency = frequency; } } }http://blog.csdn.net/qbt4juik/article/details/50633459
Maintain minFrequencyKey
public class LFUCache { class CacheEntry { private String data; private int frequency; // default constructor private CacheEntry() {} public String getData() { return data; } public void setData(String data) { this.data = data; } public int getFrequency() { return frequency; } public void setFrequency(int frequency) { this.frequency = frequency; } } private static int initialCapacity = 10; private static LinkedHashMap<Integer, CacheEntry> cacheMap = new LinkedHashMap<Integer, CacheEntry>(); /* LinkedHashMap is used because it has features of both HashMap and LinkedList. * Thus, we can get an entry in O(1) and also, we can iterate over it easily. * */ public LFUCache(int initialCapacity) { this.initialCapacity = initialCapacity; } public void addCacheEntry(int key, String data) { if(!isFull()) { CacheEntry temp = new CacheEntry(); temp.setData(data); temp.setFrequency(0); cacheMap.put(key, temp); } else { int entryKeyToBeRemoved = getLFUKey(); cacheMap.remove(entryKeyToBeRemoved); CacheEntry temp = new CacheEntry(); temp.setData(data); temp.setFrequency(0); cacheMap.put(key, temp); } } public int getLFUKey() { int key = 0; int minFreq = Integer.MAX_VALUE; for(Map.Entry<Integer, CacheEntry> entry : cacheMap.entrySet()) { if(minFreq > entry.getValue().frequency) { key = entry.getKey(); minFreq = entry.getValue().frequency; } } return key; } public String getCacheEntry(int key) { if(cacheMap.containsKey(key)) // cache hit { CacheEntry temp = cacheMap.get(key); temp.frequency++; cacheMap.put(key, temp); return temp.data; } return null; // cache miss } public static boolean isFull() { if(cacheMap.size() == initialCapacity) return true; return false; } }http://javarticles.com/2012/06/lfu-cache.html
O(1) for all of the LFU cache operations, which include insertion, access and deletion (eviction)
It requires three data structures. One is a hash table which is used to cache the key/values so that given a key we can retrieve the cache entry at O(1). Second one is a double linked list for each frequency of access. The max frequency is capped at the cache size to avoid creating more and more frequency list entries. If we have a cache of max size 4 then we will end up with 4 different frequencies. Each frequency will have a double linked list to keep track of the cache entries belonging to that particular frequency.
The third data structure would be to somehow link these frequencies lists. It can be either an array or another linked list so that on accessing a cache entry it can be easily promoted to the next frequency list in time O(1). In our article it is based on array as traversing would be faster than linked list.
https://svn.apache.org/repos/asf/activemq/trunk/activemq-kahadb-store/src/main/java/org/apache/activemq/util/LFUCache.java
public class LFUCache<Key, Value> implements Map<Key, Value> { private final Map<Key, CacheNode<Key, Value>> cache; private final LinkedHashSet[] frequencyList; private int lowestFrequency; private int maxFrequency; // private final int maxCacheSize; private final float evictionFactor; public LFUCache(int maxCacheSize, float evictionFactor) { if (evictionFactor <= 0 || evictionFactor >= 1) { throw new IllegalArgumentException("Eviction factor must be greater than 0 and lesser than or equal to 1"); } this.cache = new HashMap<Key, CacheNode<Key, Value>>(maxCacheSize); this.frequencyList = new LinkedHashSet[maxCacheSize]; this.lowestFrequency = 0; this.maxFrequency = maxCacheSize - 1; this.maxCacheSize = maxCacheSize; this.evictionFactor = evictionFactor; initFrequencyList(); } public Value put(Key k, Value v) { Value oldValue = null; CacheNode<Key, Value> currentNode = cache.get(k); if (currentNode == null) { if (cache.size() == maxCacheSize) { doEviction(); } LinkedHashSet<CacheNode<Key, Value>> nodes = frequencyList[0]; currentNode = new CacheNode(k, v, 0); nodes.add(currentNode); cache.put(k, currentNode); lowestFrequency = 0; } else { oldValue = currentNode.v; currentNode.v = v; } return oldValue; } public void putAll(Map<? extends Key, ? extends Value> map) { for (Map.Entry<? extends Key, ? 
extends Value> me : map.entrySet()) { put(me.getKey(), me.getValue()); } } public Value get(Object k) { CacheNode<Key, Value> currentNode = cache.get(k); if (currentNode != null) { int currentFrequency = currentNode.frequency; if (currentFrequency < maxFrequency) { int nextFrequency = currentFrequency + 1; LinkedHashSet<CacheNode<Key, Value>> currentNodes = frequencyList[currentFrequency]; LinkedHashSet<CacheNode<Key, Value>> newNodes = frequencyList[nextFrequency]; moveToNextFrequency(currentNode, nextFrequency, currentNodes, newNodes); cache.put((Key) k, currentNode); if (lowestFrequency == currentFrequency && currentNodes.isEmpty()) { lowestFrequency = nextFrequency; } } else { // Hybrid with LRU: put most recently accessed ahead of others: LinkedHashSet<CacheNode<Key, Value>> nodes = frequencyList[currentFrequency]; nodes.remove(currentNode); nodes.add(currentNode); } return currentNode.v; } else { return null; } } public Value remove(Object k) { CacheNode<Key, Value> currentNode = cache.remove(k); if (currentNode != null) { LinkedHashSet<CacheNode<Key, Value>> nodes = frequencyList[currentNode.frequency]; nodes.remove(currentNode); if (lowestFrequency == currentNode.frequency) { findNextLowestFrequency(); } return currentNode.v; } else { return null; } } public int frequencyOf(Key k) { CacheNode<Key, Value> node = cache.get(k); if (node != null) { return node.frequency + 1; } else { return 0; } } public void clear() { for (int i = 0; i <= maxFrequency; i++) { frequencyList[i].clear(); } cache.clear(); lowestFrequency = 0; } public Set<Key> keySet() { return this.cache.keySet(); } public Collection<Value> values() { return null; //To change body of implemented methods use File | Settings | File Templates. } public Set<Entry<Key, Value>> entrySet() { return null; //To change body of implemented methods use File | Settings | File Templates. 
} public int size() { return cache.size(); } public boolean isEmpty() { return this.cache.isEmpty(); } public boolean containsKey(Object o) { return this.cache.containsKey(o); } public boolean containsValue(Object o) { return false; //To change body of implemented methods use File | Settings | File Templates. } private void initFrequencyList() { for (int i = 0; i <= maxFrequency; i++) { frequencyList[i] = new LinkedHashSet<CacheNode<Key, Value>>(); } } private void doEviction() { int currentlyDeleted = 0; float target = maxCacheSize * evictionFactor; while (currentlyDeleted < target) { LinkedHashSet<CacheNode<Key, Value>> nodes = frequencyList[lowestFrequency]; if (nodes.isEmpty()) { throw new IllegalStateException("Lowest frequency constraint violated!"); } else { Iterator<CacheNode<Key, Value>> it = nodes.iterator(); while (it.hasNext() && currentlyDeleted++ < target) { CacheNode<Key, Value> node = it.next(); it.remove(); cache.remove(node.k); } if (!it.hasNext()) { findNextLowestFrequency(); } } } } private void moveToNextFrequency(CacheNode<Key, Value> currentNode, int nextFrequency, LinkedHashSet<CacheNode<Key, Value>> currentNodes, LinkedHashSet<CacheNode<Key, Value>> newNodes) { currentNodes.remove(currentNode); newNodes.add(currentNode); currentNode.frequency = nextFrequency; } private void findNextLowestFrequency() { while (lowestFrequency <= maxFrequency && frequencyList[lowestFrequency].isEmpty()) { lowestFrequency++; } if (lowestFrequency > maxFrequency) { lowestFrequency = 0; } } private static class CacheNode<Key, Value> { public final Key k; public Value v; public int frequency; public CacheNode(Key k, Value v, int frequency) { this.k = k; this.v = v; this.frequency = frequency; } }
http://dhruvbird.com/lfu.pdf
http://techinpad.blogspot.com/2015/10/similar-to-lru-implement-lfu-which-is.html
- class LFUCache{
- unordered_map<int, multimap<int, pair<int,int>>::iterator> mMap;
- multimap<int, pair<int,int>> mList;
- int mCap;
- public:
- LFUCache(int capacity) {
- mCap = capacity;
- }
- void moveToHead(int key) {
- multimap<int, pair<int,int>>::iterator it = mMap[key];
- pair<int, pair<int,int>> n(it->first+1, pair<int,int>(it->second.first, it->second.second));
- mList.erase(it);
- mMap[key] = mList.insert(n);
- }
- int get(int key) {
- if(!mMap.count(key)) return -1;
- moveToHead(key);
- return mMap[key]->second.second;
- }
- void set(int key, int value) {
- if(mMap.count(key)) {
- mMap[key]->second.second = value;
- moveToHead(key);
- return;
- }
- if(mList.size() == mCap) {
- auto it = mList.begin();
- mMap.erase(it->second.first);
- mList.erase(it);
- }
- pair<int,pair<int,int>> n(1, pair<int,int>(key, value));
- mMap[key] = mList.insert(n);
- }
- };
in LRU cache:
use hashmap to achieve a O(1) lookup.
use deque to maintain order by access.
use deque to maintain order by access.
in LFU cache:
use hashmap to achieve a O(1) lookup.
use double-linked list to maintain frequency
use hashmap to achieve a O(1) lookup.
use double-linked list to maintain frequency
Here is a thought, for LRU cache, the current and next “order” are easily maintainable by deque, in the case of LRU, you essentially need to:
1) remove head (when cache expires)
2) delete an element, and append it to the end. (when an item is accessed).
2) delete an element, and append it to the end. (when an item is accessed).
For LFU cache, you need:
1) remove head (when cache expires)
2) move the item to the next frequency slot (when an item is accessed).
1) remove head (when cache expires)
2) move the item to the next frequency slot (when an item is accessed).
The data structures are chosen because they fit a specific problem.
Now, I think there is a use case that’s a combination of LFU/LRU, the question is like:
How to get top 10 most traded stocks from continuous feed of stock trade in last hour?
So we can:
Use Deque to maintain the access order
Use Double-linked-list to maintain the frequency order
Use HashMap for fast lookup.
http://dhruvbird.com/lfu.pdf
Use Double-linked-list to maintain the frequency order
Use HashMap for fast lookup.