package org.fujene.communicate.indexer;

import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.LinkedBlockingQueue;

import org.fujene.communicate.hash.VirtualContentHash;
import org.fujene.communicate.lock.IndexableSwitch;
import org.fujene.communicate.node.Node;
import org.fujene.communicate.repli.ReplicateCommunicator;
import org.fujene.logging.LogParser;
import org.fujene.structure.Queue;
import org.msgpack.object.ArrayType;

/**
 * Host-side indexer thread: consumes index requests from a queue, walks the
 * consistent-hash ring of cluster nodes and stores {@code replication} copies
 * of each item — locally, remotely, or parked while a target node is suspended.
 * Thread-safety: static queues use concurrent collections; one instance is
 * expected to run the {@link #run()} loop.
 */
public class ContentIndexerHost implements Runnable{
	// Number of replicas to place per content item (set via property()).
	private static int replication;
	// Live view of cluster nodes; shared CopyOnWriteArrayList owned by caller.
	private static CopyOnWriteArrayList<Node> nodes;
	// The local node; replicas targeting it bypass the network.
	private static Node myNode;
	// Incoming index requests, consumed by run().
	private static LinkedBlockingQueue<Queue> hostQueue;
	// Items parked for a suspended node, keyed by that node.
	// NOTE(review): holds at most ONE item per node — see put() below.
	private static ConcurrentHashMap<Node, Queue> suspendQueue;
	// Attempts per remote node before giving up on a post (set in run()).
	private int retryCount;

	static{
		hostQueue = new LinkedBlockingQueue<Queue>();
		suspendQueue = new ConcurrentHashMap<Node, Queue>();
	}

	public ContentIndexerHost(){}

	/**
	 * Injects cluster-wide settings; call before starting the run() thread.
	 *
	 * @param repl    replica count per item
	 * @param ownNode the local node
	 * @param list    shared list of all cluster nodes
	 */
	public static void property(int repl, Node ownNode, CopyOnWriteArrayList<Node> list){
		replication = repl;
		myNode = ownNode;
		nodes = list;
	}

	/** Enqueues a new index request for processing by the run() loop. */
	public static void addQueue(Queue newIndex){
		hostQueue.offer(newIndex);
	}

	/** @return true when no index requests are waiting */
	public static boolean isEmpty(){
		return hostQueue.isEmpty();
	}

	/** @return number of index requests still waiting to be processed */
	public static int residue(){
		return hostQueue.size();
	}

	/**
	 * Re-enqueues the item parked for {@code liveNode} once the node is back.
	 * Fix: the old loop called {@code suspendQueue.remove(entry)} with the
	 * Map.Entry itself, which can never equal a Node key, so the parked entry
	 * was re-enqueued but never removed (leak + duplicates on repeat resume).
	 */
	public static void resumeQueue(Node liveNode){
		Queue parked = suspendQueue.remove(liveNode);
		if(parked != null) hostQueue.add(parked);
	}

	/**
	 * Drops the parked item for a node that has permanently died.
	 * Fix: remove by the Node key; the old code passed the Map.Entry, which
	 * never matched, leaking the suspended entry.
	 */
	public static void nodeDead(Node deadNode){
		suspendQueue.remove(deadNode);
	}

	/**
	 * Main loop: take one request, rebuild the hash ring, then place
	 * min(replication, nodes.size()) replicas walking clockwise from the
	 * item's content hash. Runs until the thread is interrupted/killed.
	 */
	@Override
	public void run(){
		retryCount = 2;
		long startTime = 0;
		long hash;
		TreeMap<Long, Node> nearestHashes = new TreeMap<Long, Node>();
		Entry<Long, Node> currentEntry;
		Queue newIndex;

		while(true){
			try{
				newIndex = hostQueue.take();
			}catch(InterruptedException e1){
				Thread.currentThread().interrupt(); // preserve interrupt status
				continue;
			}
			hash = VirtualContentHash.getContentHash(newIndex.ID);
			// Fix: rebuild the ring per item; stale entries keyed by previous
			// items' hashes would otherwise corrupt the nearest-node lookup.
			nearestHashes.clear();
			for(Node eachNode: nodes)
				nearestHashes.put(eachNode.getAttributeHash(hash), eachNode);
			// NOTE(review): initTime() is taken AFTER the ring build, so the
			// "calc node" timer below measures ~nothing — confirm placement.
			startTime = LogParser.initTime();

			LogParser.timerFinest("Index / calc node", startTime);
			ArrayType contentObjs = ReplicateCommunicator.makeRemoteContentObject(newIndex);

			// Stop to post to the client queues, due to keep replication in
			// time
			try{
				IndexableSwitch.waitIfLocked();
			}catch(InterruptedException e){
				Thread.currentThread().interrupt(); // preserve interrupt status
			}
			// Fix: restart the ring walk at this item's own hash; the cursor
			// used to carry over from the previous item, skewing placement.
			currentEntry = null;
			int replicas = Math.min(replication, nodes.size());
			for(int i = 0; i < replicas; i++){
				if(currentEntry == null) currentEntry = nearestHashes.ceilingEntry(Long.valueOf(hash));
				else currentEntry = nearestHashes.higherEntry(currentEntry.getKey());
				// Passed the top of the ring: wrap around to the first node.
				if(currentEntry == null) currentEntry = nearestHashes.firstEntry();

				LogParser.fine("Store content to " + currentEntry.getValue().getAddr());
				if(currentEntry.getValue() == myNode){
					// local: Directly adds to client queue
					ContentIndexerClient.addToQueue(newIndex);
				}else if(currentEntry.getValue().isSuspend()){
					// NOTE(review): put() overwrites any earlier item parked
					// for this node — only the latest survives. Confirm a
					// per-node queue isn't required here.
					suspendQueue.put(currentEntry.getValue(), newIndex);
				}else{
					// remote
					startTime = LogParser.initTime();
					for(int j = 0; j < retryCount; j++){
						try{
							ReplicateCommunicator.indexContentRemote(contentObjs, newIndex.ID,
									currentEntry.getValue());
							break;
						}catch(Exception e){
							LogParser.warning("Can not index " + newIndex.ID + " due to server down.");
							LogParser.warning("Retry to index in next node.");
						}
					}
					LogParser.timerFinest("Index / Posting to remote", startTime);
				}
			}
		}
	}
}
