Friday, 16 February 2018

LRU Cache With constant time complexity

Below is the code implementation for an LRU cache with constant time complexity. We use a doubly linked list to maintain the access order of the cache entries; lookups are served from an in-memory LinkedHashMap. The doubly linked list keeps the least recently used node on the left and the most recently used node on the right.

lruEntry -> A ->B ->C ->D ->mruEntry.


package com.test.lrucache;

public class CacheEntry<K, V> {

private CacheEntry<K, V> next;
private CacheEntry<K, V> previous;
private K key;
private V value;

public CacheEntry(CacheEntry<K, V> previous, CacheEntry<K, V> next, K key, V value) {
super();
this.next = next;
this.previous = previous;
this.key = key;
this.value = value;
}

public CacheEntry<K, V> getNext() {
return next;
}

public void setNext(CacheEntry<K, V> next) {
this.next = next;
}

public CacheEntry<K, V> getPrevious() {
return previous;
}

public void setPrevious(CacheEntry<K, V> previous) {
this.previous = previous;
}

public K getKey() {
return key;
}

public void setKey(K key) {
this.key = key;
}

public V getValue() {
return value;
}

public void setValue(V value) {
this.value = value;
}
}



package com.test.lrucache;

import java.util.LinkedHashMap;
import java.util.Map;

/**
 * An LRU cache with O(1) get and put.
 *
 * A HashMap-backed index gives O(1) key lookup, while a doubly linked list
 * maintains access order: the head ({@code lruEntry}) is the least recently
 * used node, the tail ({@code mruEntry}) the most recently used. When the
 * cache is full, the head node is evicted.
 *
 * Fixes over the original version:
 * - first insert left {@code lruEntry} null, so eviction NPE'd with maxSize 1
 * - evicting the only node NPE'd on {@code lruEntry.getNext()}
 * - re-putting an existing key corrupted the list (no duplicate check)
 * - nodes were compared via {@code ==} on keys instead of node identity
 *
 * Not thread-safe.
 */
public class LRUCache<K, V> {

    /**
     * Doubly linked list node holding one cache mapping. Private so the list
     * invariants cannot be broken from outside this class.
     */
    private static final class Node<K, V> {
        final K key;
        V value;
        Node<K, V> previous;
        Node<K, V> next;

        Node(K key, V value) {
            this.key = key;
            this.value = value;
        }
    }

    private final int maxSize;
    private final Map<K, Node<K, V>> listPointerMap;
    private Node<K, V> lruEntry; // head: least recently used, first to evict
    private Node<K, V> mruEntry; // tail: most recently used

    /**
     * @param maxSize maximum number of entries the cache may hold
     * @throws IllegalArgumentException if maxSize is not positive
     */
    public LRUCache(int maxSize) {
        if (maxSize <= 0) {
            throw new IllegalArgumentException("maxSize must be positive: " + maxSize);
        }
        this.maxSize = maxSize;
        this.listPointerMap = new LinkedHashMap<>();
    }

    /**
     * Inserts or updates a mapping and marks it most recently used.
     * Evicts the least recently used entry first if the cache is full.
     */
    public void putCacheEntry(K key, V value) {
        Node<K, V> existing = listPointerMap.get(key);
        if (existing != null) {
            // Update in place; a re-put counts as a use.
            existing.value = value;
            moveToMru(existing);
            return;
        }
        if (isFull()) {
            evictLru();
        }
        Node<K, V> newEntry = new Node<>(key, value);
        appendAsMru(newEntry);
        listPointerMap.put(key, newEntry);
    }

    /**
     * Returns the value for {@code key}, marking it most recently used,
     * or null if the key is not cached.
     */
    public V getCacheEntry(K key) {
        Node<K, V> entry = listPointerMap.get(key);
        if (entry == null) {
            return null;
        }
        moveToMru(entry);
        return entry.value;
    }

    /** Removes the head (least recently used) node from both structures. */
    private void evictLru() {
        Node<K, V> victim = lruEntry;
        listPointerMap.remove(victim.key);
        unlink(victim);
    }

    /** Moves an already-linked node to the MRU (tail) position. */
    private void moveToMru(Node<K, V> entry) {
        if (entry == mruEntry) {
            return; // already most recently used
        }
        unlink(entry);
        appendAsMru(entry);
    }

    /** Detaches a node from the list, repairing head/tail as needed. */
    private void unlink(Node<K, V> entry) {
        if (entry.previous != null) {
            entry.previous.next = entry.next;
        } else {
            lruEntry = entry.next;
        }
        if (entry.next != null) {
            entry.next.previous = entry.previous;
        } else {
            mruEntry = entry.previous;
        }
        entry.previous = null;
        entry.next = null;
    }

    /** Appends a detached node at the MRU (tail) position. */
    private void appendAsMru(Node<K, V> entry) {
        entry.previous = mruEntry;
        if (mruEntry != null) {
            mruEntry.next = entry;
        }
        mruEntry = entry;
        if (lruEntry == null) {
            lruEntry = entry; // list was empty
        }
    }

    private boolean isFull() {
        return listPointerMap.size() >= maxSize;
    }

}

package com.test.lrucache;

/** Small driver that exercises the LRU cache from the blog post. */
public class MainClass {

    public static void main(String[] args) {
        LRUCache<String, String> lru = new LRUCache<>(3);

        // Fill the cache to capacity.
        lru.putCacheEntry("1", "One");
        lru.putCacheEntry("2", "Two");
        lru.putCacheEntry("3", "Three");

        // Touch "1" and "2" so that "3" becomes the least recently used entry.
        System.out.println(lru.getCacheEntry("1"));
        System.out.println(lru.getCacheEntry("2"));

        // Inserting a fourth entry should evict "3".
        lru.putCacheEntry("4", "Four");
        System.out.println(lru.getCacheEntry("3"));
        System.out.println(lru.getCacheEntry("4"));
    }

}

Thursday, 15 February 2018

LFU Cache

In this blog post we will see how we can implement an LFU cache in Java.

package com.test.lfucache;

/**
 * A single LFU cache slot: the cached payload together with the number of
 * times it has been read, which drives eviction decisions.
 */
public class CacheEntry<T> {

    // Cached payload.
    private T data;
    // Access count; the entry with the lowest count is evicted first.
    private int frequency;

    /** Creates an empty entry; populate it via the setters. */
    public CacheEntry() {
    }

    /** Creates an entry with an initial payload and access count. */
    public CacheEntry(T data, int frequency) {
        this.data = data;
        this.frequency = frequency;
    }

    public T getData() {
        return data;
    }

    public void setData(T data) {
        this.data = data;
    }

    public int getFrequency() {
        return frequency;
    }

    public void setFrequency(int frequency) {
        this.frequency = frequency;
    }

}

The above class represents a cache entry. Each entry has a frequency associated with it, which helps us decide which cache object to evict.


package com.test.lfucache;

import java.util.LinkedHashMap;
import java.util.Map;

/**
 * A simple LFU (least frequently used) cache.
 *
 * Each entry tracks how many times it has been read; when the cache is full,
 * the entry with the lowest read count is evicted (O(n) scan).
 *
 * Fixes over the original version:
 * - {@code evictCache} never updated {@code minFrequency} inside its loop,
 *   so it returned the LAST map key rather than the least-frequently-used one
 * - re-putting an existing key no longer triggers a needless eviction,
 *   since the cache size does not grow in that case
 *
 * Not thread-safe.
 */
public class LFUCache<K, V> {

    /** Cache slot pairing a value with its access count. */
    private static final class Entry<V> {
        V data;
        int frequency; // starts at 0; incremented on every successful get

        Entry(V data) {
            this.data = data;
        }
    }

    private final int maxSize;
    private final Map<K, Entry<V>> cacheMap;

    /**
     * @param maxSize maximum number of entries the cache may hold
     * @throws IllegalArgumentException if maxSize is not positive
     */
    public LFUCache(int maxSize) {
        if (maxSize <= 0) {
            throw new IllegalArgumentException("maxSize must be positive: " + maxSize);
        }
        this.maxSize = maxSize;
        this.cacheMap = new LinkedHashMap<>();
    }

    /**
     * Inserts or updates a mapping. A new entry starts with frequency 0;
     * updating an existing key overwrites its value and resets its frequency
     * (preserving the original overwrite semantics). Evicts the least
     * frequently used entry first if a new key would exceed capacity.
     */
    public void putCacheEntry(K key, V value) {
        Entry<V> existing = cacheMap.get(key);
        if (existing != null) {
            // Size is unchanged, so no eviction is needed.
            existing.data = value;
            existing.frequency = 0;
            return;
        }
        if (isFull()) {
            cacheMap.remove(evictCache());
        }
        cacheMap.put(key, new Entry<>(value));
    }

    /**
     * Returns the value for {@code key}, bumping its access count,
     * or null if the key is not cached.
     */
    public V getCacheEntry(K key) {
        Entry<V> entry = cacheMap.get(key);
        if (entry == null) {
            return null;
        }
        entry.frequency++;
        return entry.data;
    }

    private boolean isFull() {
        return cacheMap.size() >= maxSize;
    }

    /**
     * Returns the key with the lowest access frequency (ties broken by
     * insertion order, since the backing map is a LinkedHashMap).
     */
    private K evictCache() {
        int minFrequency = Integer.MAX_VALUE;
        K key = null;
        for (Map.Entry<K, Entry<V>> entry : cacheMap.entrySet()) {
            if (entry.getValue().frequency < minFrequency) {
                // Track the running minimum — the original forgot this update.
                minFrequency = entry.getValue().frequency;
                key = entry.getKey();
            }
        }
        return key;
    }
}

The above class is the main cache class. We have used a LinkedHashMap to store key and cache entry. We can use ConcurrentHashMap also for storing the key and cache entry. Finally the main class looks like

package com.test.lfucache;

/** Small driver that exercises the LFU cache from the blog post. */
public class MainClass {

    public static void main(String[] args) {
        LFUCache<String, String> lfu = new LFUCache<>(3);

        // Fill the cache to capacity.
        lfu.putCacheEntry("1", "One");
        lfu.putCacheEntry("2", "Two");
        lfu.putCacheEntry("3", "Three");

        // Bump the frequencies of "1" and "2"; "3" stays at zero.
        System.out.println(lfu.getCacheEntry("1"));
        System.out.println(lfu.getCacheEntry("2"));

        // Inserting a fourth entry should evict the least-used key "3".
        lfu.putCacheEntry("4", "Four");
        System.out.println(lfu.getCacheEntry("3"));
        System.out.println(lfu.getCacheEntry("4"));
    }

}

Output :

One
Two
null
Four

The above LFU cache has O(n) runtime complexity for eviction, so inserting into a full cache is O(n) in the worst case. We can improve the performance with the help of another Map and a LinkedList.