/**
 * 
 */
package edu.umd.clip.lm.util;

import java.lang.ref.*;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.*;

/**
 * @author Denis Filimonov <den@cs.umd.edu>
 *
 * The cache keeps at most <b>hardCapacity</b> hard references,
 * at most <b>softCapacity</b> soft references, and an unlimited number of weak references
 * 
 * TODO: the weak-reference hash map is not bounded.
 *       concurrency is not great, key level locking would be best
 *       add cache resizing
 */
public abstract class LRUCache<K, T> {
	// Strong references to the most recently used items (bounded by hardCapacity).
	private final LRU<K,T> hardLRU;
	// Soft-reference layer (bounded by softCapacity); null when the layer is disabled.
	private final LRU<K,SoftReference<T>> softLRU;
	// Unbounded weak-reference layer; null unless enabled in the constructor.
	private final HashMap<K, WeakReference<T>> weakCache;
	// Statistics counters; atomic so the getters do not need the lock.
	private final AtomicLong hits = new AtomicLong();
	private final AtomicLong misses = new AtomicLong();
	private final AtomicLong requests = new AtomicLong();
	// volatile: checked by getItem() before the lock is taken.
	private volatile boolean enabled = true;
	// Single lock guarding all three cache layers and pendingRequests.
	private final Lock lock;
	// Keys currently being loaded; concurrent requests for the same key wait
	// on the request's condition instead of loading the item a second time.
	private final HashMap<K, PendingRequest<T>> pendingRequests;
	// Timestamp (ms) of the last sweep of cleared weak references.
	private long lastCleanup;
	public static long CLEANUP_INTERVAL = 600000;
	private final AtomicLong pendingRequestCount = new AtomicLong();
	// delayedLockCount / totalLockWaitTime are exposed through getters but are
	// currently never incremented; kept for interface compatibility.
	private final AtomicLong delayedLockCount = new AtomicLong();
	private final AtomicLong totalLockWaitTime = new AtomicLong();
	private final AtomicLong cleanupTime = new AtomicLong();
	private final AtomicLong getTime = new AtomicLong();
	private final AtomicLong putTime = new AtomicLong();
	
	private final boolean useWeakCache;
	private final boolean useSoftCache;
	
	/**
	 * Creates a cache without the weak-reference layer.
	 */
	public LRUCache(int hardCapacity, int softCapacity) {
		this(hardCapacity, softCapacity, false);
	}
	
	/**
	 * @param hardCapacity maximum number of hard references
	 * @param softCapacity maximum number of soft references; the soft layer is
	 *        only enabled when it can hold more than the hard layer
	 * @param useWeakCache enable the (unbounded) weak-reference layer; it is
	 *        forced off when the soft layer is disabled
	 */
	public LRUCache(int hardCapacity, int softCapacity, boolean useWeakCache) {
		hardLRU = new LRU<K,T>(hardCapacity);
		useSoftCache = softCapacity > hardCapacity;
		if (useSoftCache) {
			softLRU = new LRU<K,SoftReference<T>>(softCapacity);
			this.useWeakCache = useWeakCache;
		} else {
			softLRU = null;
			this.useWeakCache = false; // weak layer requires the soft layer
		}
		if (this.useWeakCache) {
			weakCache = new HashMap<K, WeakReference<T>>(softCapacity);
		} else {
			weakCache = null;
		}
		lock = new ReentrantLock();
		pendingRequests = new HashMap<K, PendingRequest<T>>();
		lastCleanup = System.currentTimeMillis();
	}
	
	/**
	 * Loads the item from the backend on a cache miss. Called WITHOUT the
	 * cache lock held; may return null (the miss is then not cached).
	 */
	abstract protected T loadItem(K key);
	
	/**
	 * A load in progress. Threads requesting the same key await the condition
	 * until the request is marked fulfilled via {@link #setItem}.
	 */
	static class PendingRequest<T> {
		Condition condition;
		T item;
		boolean fulfilled;
		
		public PendingRequest(Condition condition) {
			this.condition = condition;
			item = null;
			fulfilled = false;
		}
		public Condition getCondition() {
			return condition;
		}
		public void setCondition(Condition condition) {
			this.condition = condition;
		}
		public T getItem() {
			return item;
		}
		/** Stores the result and marks the request fulfilled (item may be null). */
		public void setItem(T item) {
			this.item = item;
			this.fulfilled = true;
		}
		public boolean isFulfilled() {
			return fulfilled;
		}
	}
	
	private void getLock() {
		lock.lock();
	}
	
	/**
	 * Like getItem() except it does not call the backend on a miss.
	 * Checks the hard, then soft, then weak layer; counts as a request
	 * (and a hit if found) in the statistics.
	 */
	public T peekItem(K key) {
		getLock();
		try {
			T item = hardLRU.get(key);
			if (item == null) {
				if (useSoftCache) {
					SoftReference<T> softRef = softLRU.get(key);
					if (softRef != null) item = softRef.get();
				}
				if (item == null && useWeakCache) {
					WeakReference<T> weakRef = weakCache.get(key);
					if (weakRef != null) item = weakRef.get();
				}
			}
			requests.incrementAndGet();
			if (item != null) {
				hits.incrementAndGet();
			}
			return item;
		} finally {
			lock.unlock();
		}
	}
	
	public boolean containsItem(K key) {
		return peekItem(key) != null;
	}
	
	/**
	 * For asynchronous implementations: inserts an item directly into all
	 * enabled layers and wakes any threads waiting for this key.
	 */
	protected void putItem(K key, T item) {
		getLock();
		
		hardLRU.put(key, item);
		if (useSoftCache) {
			softLRU.put(key, new SoftReference<T>(item));
			if (useWeakCache) {
				weakCache.put(key, new WeakReference<T>(item));
			}
		}
		
		PendingRequest<T> pending = pendingRequests.get(key);
		if (pending != null) {
			// setItem() marks the request fulfilled; assigning the field
			// directly would leave waiters looping in await() forever.
			pending.setItem(item);
			pending.condition.signalAll();
			// The value is in the cache now; drop the pending entry.
			pendingRequests.remove(key);
		}
		lock.unlock();
	}
	
	/**
	 * Returns the cached item, loading it via loadItem() on a miss. Only one
	 * thread loads a given key; concurrent requests for the same key block
	 * until the load completes. May return null if loadItem() does.
	 */
	public T getItem(K key) {
		if (!enabled) {
			return loadItem(key);
		}
		
		getLock();
		requests.incrementAndGet();
		
		// Check whether another thread is already loading this key.
		PendingRequest<T> pending = pendingRequests.get(key);
		if (pending != null) {
			pendingRequestCount.incrementAndGet();
			boolean interrupted = false;
			while (!pending.isFulfilled()) {
				try {
					pending.condition.await();
				} catch (InterruptedException e) {
					// Keep waiting for the result; restore the flag below.
					interrupted = true;
				}
			}
			if (interrupted) {
				Thread.currentThread().interrupt();
			}
			hits.incrementAndGet();
			lock.unlock();
			return pending.getItem();
		}

		if (System.currentTimeMillis() - lastCleanup > CLEANUP_INTERVAL) {
			cleanup(); // reentrant: we already hold the lock
		}
		
		long start = System.nanoTime();
		long end;
		boolean missed = false;
		T item = hardLRU.get(key);
		if (item == null) {
			if (useSoftCache) {
				SoftReference<T> softRef = softLRU.get(key); 
				if (softRef != null) item = softRef.get();
			}
			if (item == null) {
				if (useWeakCache) {
					WeakReference<T> weakRef = weakCache.get(key); 
					if (weakRef != null) item = weakRef.get();
				}
				if (item == null) {
					// Full miss: publish a pending request, release the lock,
					// and load the item from the backend.
					misses.incrementAndGet();
					missed = true;
					pending = new PendingRequest<T>(lock.newCondition());
					pendingRequests.put(key, pending);
					end = System.nanoTime();
					getTime.addAndGet(end - start);
					lock.unlock();
					
					boolean loaded = false;
					try {
						item = loadItem(key);
						loaded = true;
					} finally {
						// Even if loadItem() threw, fulfill the request (with a
						// null item) and wake the waiters so they do not block
						// forever, then let the exception propagate.
						getLock();
						start = System.nanoTime();
						pending.setItem(item);
						pending.condition.signalAll();
						pendingRequests.remove(key);
						if (!loaded) {
							lock.unlock();
						}
					}
					
					if (item == null) {
						lock.unlock();
						return null;
					}
					if (useWeakCache) {
						weakCache.put(key, new WeakReference<T>(item));
					}
				}
				if (useSoftCache) {
					softLRU.put(key, new SoftReference<T>(item));
				}
			}
			// Promote to the hard layer on any non-hard hit or fresh load.
			hardLRU.put(key, item);
		}
		end = System.nanoTime();
		putTime.addAndGet(end - start);
		if (!missed) hits.incrementAndGet();
		lock.unlock();
		return item;
	}
	
	/**
	 * Sweeps the weak-reference layer, dropping entries whose referents have
	 * been garbage-collected. Safe to call while holding the lock (reentrant).
	 */
	public void cleanup() {
		// Record the sweep time even when there is nothing to sweep, so
		// getItem() does not re-trigger cleanup on every call once the
		// interval has elapsed.
		lastCleanup = System.currentTimeMillis();
		if (!useWeakCache) return;
		lock.lock();
		long start = System.nanoTime();
		for(Iterator<Map.Entry<K, WeakReference<T>>> i = weakCache.entrySet().iterator(); i.hasNext();) {
			if (i.next().getValue().get() == null) {
				i.remove();
			}
		}
		cleanupTime.addAndGet(System.nanoTime() - start);
		lock.unlock();
	}
	
	/** Fraction of requests served from the cache; 0 before any request. */
	public float getHitRatio() {
		long total = requests.longValue();
		return total == 0 ? 0f : hits.floatValue() / total;
	}
	
	/** Fraction of requests that went to the backend; 0 before any request. */
	public float getMissRatio() {
		long total = requests.longValue();
		return total == 0 ? 0f : misses.floatValue() / total;
	}

	public long getHits() {
		return hits.longValue();
	}

	public long getMisses() {
		return misses.longValue();
	}

	public long getRequests() {
		return requests.longValue();
	}

	public long getPending() {
		return pendingRequestCount.longValue();
	}
	
	public long getDelayed() {
		return delayedLockCount.longValue();
	}
	
	public long getLockWaitTime() {
		return totalLockWaitTime.longValue();
	}
	
	public boolean isEnabled() {
		return enabled;
	}

	/** When disabled, getItem() bypasses the cache and calls loadItem() directly. */
	public void setEnabled(boolean enabled) {
		this.enabled = enabled;
	}
	
	/** Human-readable statistics summary (hit ratio is a fraction, not a percentage). */
	public String getCacheStats() {
		int hardSize = hardLRU.size();
		int softSize = useSoftCache ? softLRU.size() : 0;
		int weakSize = useWeakCache ? weakCache.size() : 0;
		return String.format("%f%% hits of %d, hard=%d, soft=%d, weak=%d, pending=%d, %d locks waited for %fs, get %fs, put %fs", 
				getHitRatio(), requests.longValue(), hardSize, softSize, weakSize, pendingRequestCount.longValue(), delayedLockCount.longValue(), 
				totalLockWaitTime.doubleValue() / 1000000000,
				getTime.doubleValue() / 1000000000,
				putTime.doubleValue() / 1000000000
				);
	}
}
