package org.fujene.communicate.indexer.term;

import java.util.Map.Entry;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import org.fujene.communicate.hash.Hashing;
import org.fujene.communicate.lock.IndexableSwitch;
import org.fujene.communicate.node.Node;
import org.fujene.communicate.rpc.Communicator;
import org.fujene.logging.LogParser;

/**
 * Host-side dispatcher for term indexing. Consumes {@link TermPackage}s from a
 * shared queue, picks the owning node(s) via consistent hashing over the node
 * set, and routes each package locally, to a suspend queue, or over RPC.
 *
 * Thread-safety: the static queues are concurrent collections shared by all
 * instances; the cluster configuration is set once via {@link #property}.
 */
public class TermIndexerHost implements Runnable{
	// Term currently being processed; only used for failure log messages.
	private String newTerm;
	// Number of replicas to write per term (capped at nodes.size() at dispatch time).
	private static int replication;
	private static Node myNode;
	private static SortedSet<Node> nodes;
	// Packages waiting to be dispatched by the host thread(s).
	private static LinkedBlockingQueue<TermPackage> hostQueue;
	// Packages parked because their target node is suspended; re-queued by resumeQueue().
	private static ConcurrentHashMap<Node, TermPackage> suspendQueue;
	private Communicator communicator;
	// Attempts per remote indexing call before giving up on that replica.
	private int retryCount = 2;

	static{
		hostQueue = new LinkedBlockingQueue<TermPackage>();
		suspendQueue = new ConcurrentHashMap<Node, TermPackage>();
	}

	public TermIndexerHost(){
		communicator = new Communicator();
	}

	/**
	 * Sets the shared cluster configuration used by all host threads.
	 *
	 * @param repl    desired replication factor
	 * @param ownNode the local node
	 * @param list    sorted set of all known nodes
	 */
	public static void property(int repl, Node ownNode, SortedSet<Node> list){
		replication = repl;
		myNode = ownNode;
		nodes = list;
	}

	/** Enqueues a term package for dispatch. */
	public static void addToQueue(TermPackage newIndex){
		hostQueue.add(newIndex);
	}

	/** Returns true when no packages are waiting for dispatch. */
	public static boolean isEmpty(){
		return hostQueue.isEmpty();
	}

	/** Returns the number of packages still waiting in the host queue. */
	public static int residue(){
		return hostQueue.size();
	}

	/**
	 * Moves any package suspended for {@code liveNode} back onto the host queue.
	 * The suspend map's iterator is weakly consistent, so concurrent removal is safe.
	 */
	public static void resumeQueue(Node liveNode){
		for(Entry<Node, TermPackage> entries: suspendQueue.entrySet()){
			// equals(), not ==: the map is keyed by Node equality, and the caller
			// may hold a distinct instance representing the same node.
			if(entries.getKey().equals(liveNode)){
				hostQueue.add(entries.getValue());
				// Remove by key. The original passed the Entry object itself, which
				// never matches a Node key, so packages leaked in the suspend queue.
				suspendQueue.remove(entries.getKey());
			}
		}
	}

	/** Drops from the suspend queue any package destined for a dead node. */
	public static void nodeDead(Node deadNode){
		for(Entry<Node, TermPackage> entries: suspendQueue.entrySet()){
			// Same fix as resumeQueue(): remove by key, compare with equals().
			if(entries.getKey().equals(deadNode)) suspendQueue.remove(entries.getKey());
		}
	}

	/**
	 * Dispatch loop: blocks on the host queue, selects the target node(s) by
	 * consistent hashing, and routes each package to the local client queue,
	 * the suspend queue, or a remote node. Runs until the thread dies.
	 */
	@Override
	public void run(){
		long time = 0;
		TermPackage newIndex = null;

		while(true){
			try{
				// take() clears the interrupt flag when it throws, so retrying
				// here does not busy-loop.
				newIndex = hostQueue.take();
				time = System.nanoTime();
			}catch(InterruptedException e1){
				continue;
			}
			// Record the term so failure logs below show it; previously newTerm
			// was never assigned and the warning always printed null.
			newTerm = newIndex.term();
			long hash = Hashing.getTermHash(newTerm);
			time = LogParser.timerFinest("TERM: calculate term hash", time);
			// Build the hash ring: each node's position -> node, then take the
			// ceiling of the term hash, wrapping to the first entry at the end.
			TreeMap<Long, Node> nearestHashes = new TreeMap<Long, Node>();
			Entry<Long, Node> currentEntry = null;
			for(Node eachNode: nodes)
				nearestHashes.put(eachNode.getAttributeHash(hash), eachNode);
			currentEntry = nearestHashes.ceilingEntry(Long.valueOf(hash));
			if(currentEntry == null) currentEntry = nearestHashes.firstEntry();
			Node nearest = currentEntry.getValue();
			time = LogParser.timerFinest("TERM: select node", time);
			// Cluster id = rank of the chosen node within the sorted node set.
			int cluster = nodes.headSet(nearest).size();
			newIndex.setCluster(cluster);
			LogParser.timerFinest("TERM: select cluster", time);

			try{
				IndexableSwitch.waitIfLocked();
			}catch(InterruptedException e){
				// Restore the flag so the blocking take() above can observe it.
				Thread.currentThread().interrupt();
			}
			// Write to at most min(replication, nodes.size()) successive ring
			// positions, wrapping around the ring.
			int replicas = Math.min(replication, nodes.size());
			for(int i = 0; i < replicas; i++){
				if(i != 0) currentEntry = nearestHashes.higherEntry(currentEntry.getKey());
				if(currentEntry == null) currentEntry = nearestHashes.firstEntry();

				time = System.currentTimeMillis();
				if(currentEntry.getValue().equals(myNode)){
					// Local replica: hand straight to the client queue.
					TermIndexerClient.addToQueue(newIndex);
					LogParser.timerFinest("TERM: Add to client queue", time);
				}else if(currentEntry.getValue().isSuspend()){
					// Target suspended: park until resumeQueue() re-enqueues it.
					// NOTE(review): a later package for the same node overwrites
					// the parked one — confirm this at-most-one semantics is intended.
					suspendQueue.put(currentEntry.getValue(), newIndex);
					LogParser.timerFinest("TERM: Add to suspend queue", time);
				}else{
					// Remote replica: attempt the RPC up to retryCount times.
					// NOTE(review): retries hit the SAME node despite the log text.
					for(int j = 0; j < retryCount; j++){
						time = LogParser.initTime();
						try{
							communicator.indexTermRemote(currentEntry.getValue(), newIndex.getRemoteObject());
							LogParser.timerFinest("TERM: Add to remote queue", time);
							break;
						}catch(Exception e){
							LogParser.warning("Can not index term \"" + newTerm
								+ "\", probably due to server down.");
							LogParser.warning("Retry indexing on another node.");
						}
						LogParser.timerFinest("Index / store term remotely", time);
					}
				}
			}
		}
	}
}
