package org.fujene.communicate.repli;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.net.Inet4Address;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.logging.Level;

import org.fujene.communicate.deleter.DeleterClient;
import org.fujene.communicate.deleter.DeleterHost;
import org.fujene.communicate.hash.HashCompareTool;
import org.fujene.communicate.hash.VirtualContentHash;
import org.fujene.communicate.indexer.ContentIndexerClient;
import org.fujene.communicate.indexer.ContentIndexerHost;
import org.fujene.communicate.indexer.TermIndexerClient;
import org.fujene.communicate.indexer.TermIndexerHost;
import org.fujene.communicate.lock.IndexableSwitch;
import org.fujene.communicate.node.Node;
import org.fujene.communicate.repli.console.ReplicateConsole;
import org.fujene.communicate.searcher.SearcherHost;
import org.fujene.constants.Constants;
import org.fujene.index.ReplicateIndexSystem;
import org.fujene.logging.LogParser;
import org.fujene.memman.ReplicateMemoryManager;
import org.fujene.memman.backup.ContentSnapshoter;
import org.fujene.memman.backup.InvertIndexSnapshoter;
import org.fujene.memman.backup.TermSnapshoter;
import org.fujene.memman.rank.ReplicateReranker;
import org.fujene.search.ReplicateSearchSystem;
import org.fujene.structure.ContentInfo;
import org.fujene.structure.Queue;
import org.fujene.structure.ExtendResult;
import org.fujene.structure.ExtendTerm;
import org.fujene.structure.HashIDExt;
import org.fujene.structure.LookupPointer;
import org.fujene.structure.ReplicateInfo;
import org.fujene.structure.SearchQueue;
import org.fujene.structure.SearcherQueue;
import org.fujene.structure.Term;
import org.fujene.structure.TermIndexee;
import org.fujene.structure.TermList;
import org.fujene.term.Chopper;
import org.fujene.term.SenChopper;
import org.fujene.term.WordBigramChopper;
import org.fujene.toolbox.ByteConverter;
import org.fujene.weight.StandardWeight;
import org.msgpack.MessagePackObject;
import org.msgpack.object.ArrayType;
import org.msgpack.object.IntegerType;
import org.msgpack.object.RawType;
import org.msgpack.rpc.Server;
import org.msgpack.rpc.loop.EventLoop;

public final class ReplicatePeerSystem{
	// MemoryManager -> Memory, Disk
	private ReplicateMemoryManager memman;

	// Raised via setStopper() to tell cooperating workers to stop.
	public boolean stopFlag = false;

	// Worker pools: indexing pipeline, per-query searchers, beacon sender.
	private ExecutorService execIndex;
	private ExecutorService execSearch;
	private ExecutorService execBeacon;
	private Thread beaconThread;
	private Thread[] indexThreads;
	private int indexThreadNum;
	private int searchThreadNum;

	// Storage settings; filled by getArgs() (primary) or getInformation() (secondary).
	private String directory;
	private int blockNum;
	private int blockSize;

	// //////// P2P ///////////
	private int peerport = Constants.PEER_PORT;
	private int indexport = Constants.INDEX_PORT;
	private int searchport = Constants.SEARCH_PORT;

	// Ring/replication settings; exported to joining peers via putInfo().
	private int replication;
	private int virtualNodes;
	private int rankType;
	private int beaconInterval;
	private int beaconThreshold;
	private int backupInterval;

	// Known peers (this node included) and the hash -> responsible-node map
	// derived from them in refreshHashList().
	private CopyOnWriteArrayList<Node> nodeList;
	private Node myNode;
	// Bootstrap addresses given with --secondary; null when running as primary.
	private String[] firstHost;
	private Server server;
	private Chopper chopper;
	private ContentInfo contentInfo;
	private ConcurrentSkipListMap<Long, Node> hashList;
	private ReplicateIndexSystem indexServer;
	private ReplicateSearchSystem searchServer;

	private boolean clientLog = false;
	private boolean isRerank = false;

	/**
	 * Boots the peer: resolves a local IPv4 address, starts the RPC server,
	 * spawns the indexer/searcher/beacon workers and joins (or creates) the
	 * replication ring.
	 * @throws IOException if a server socket or log file cannot be opened
	 * @throws InterruptedException if startup is interrupted
	 */
	public void initialize() throws IOException, InterruptedException{
		nodeList = new CopyOnWriteArrayList<Node>();

		// get my IPv4 address
		Enumeration<NetworkInterface> enulfs = NetworkInterface.getNetworkInterfaces();
		if(!enulfs.hasMoreElements()) return;
		NetworkInterface ni = enulfs.nextElement();
		Enumeration<InetAddress> enuAddr = ni.getInetAddresses();
		if(!enuAddr.hasMoreElements()) return;
		// FIXME: choose IPv4/IPv6 by setting
		InetAddress addrInfo = enuAddr.nextElement();
		while(!(addrInfo instanceof Inet4Address)){
			// BUGFIX: bail out instead of letting nextElement() throw
			// NoSuchElementException when the interface has no IPv4 address.
			if(!enuAddr.hasMoreElements()) return;
			addrInfo = enuAddr.nextElement();
		}

		String address = addrInfo.getHostAddress();
		Node.property(virtualNodes, beaconThreshold);
		myNode = Node.createLocal(address, peerport);
		nodeList.add(myNode);

		getInformation();
		// Initial value, if user don't set content set
		if(contentInfo == null) contentInfo = new ContentInfo(new String[]{ "content" }, new String[0]);

		server = new Server(EventLoop.defaultEventLoop());
		server.serve(new ReplicatePeerHandler(this, contentInfo.getContentNum(), contentInfo.getAppendixNum()));
		server.listen(peerport);

		memman = new ReplicateMemoryManager(blockSize, blockNum, directory, contentInfo, backupInterval);

		// temporary value, this values must exceed 6, should be multiplier of 6
		indexThreadNum = 12;
		searchThreadNum = 5;
		// Index threads: six distinct roles, round-robin over the pool.
		execIndex = Executors.newFixedThreadPool(indexThreadNum);
		indexThreads = new Thread[indexThreadNum];
		for(int i = 0; i < indexThreads.length; i++){
			if(i % 6 == 0) indexThreads[i] = new Thread(new ContentIndexerHost());
			else if(i % 6 == 1) indexThreads[i] = new Thread(new ContentIndexerClient(clientLog));
			else if(i % 6 == 2) indexThreads[i] = new Thread(new TermIndexerHost());
			else if(i % 6 == 3) indexThreads[i] = new Thread(new TermIndexerClient(clientLog));
			else if(i % 6 == 4) indexThreads[i] = new Thread(new DeleterHost());
			else if(i % 6 == 5) indexThreads[i] = new Thread(new DeleterClient());
			indexThreads[i].setDaemon(true);
			execIndex.execute(indexThreads[i]);
		}

		// Search threads
		execSearch = Executors.newFixedThreadPool(searchThreadNum);

		execBeacon = Executors.newFixedThreadPool(1);
		beaconThread = new Thread(new BeaconService(myNode, nodeList, this, beaconInterval, 5 * 60));
		beaconThread.setDaemon(true);
		execBeacon.execute(beaconThread);

		searchServer = new ReplicateSearchSystem(this, searchport);
		searchServer.initService();
		indexServer = new ReplicateIndexSystem(this, contentInfo.getContentNum(),
				contentInfo.getAppendixNum(), indexport);
		indexServer.initService();

		// globally-use switch, only initializes at here
		IndexableSwitch.initialize();

		// BUGFIX: only install the default chopper when none was configured;
		// previously this line overwrote the chopper chosen through the
		// "Chopper" setting in getArgs().
		if(chopper == null) chopper = new SenChopper();

		if(firstHost != null){
			registerNode();
			callForData();
		}else refreshHashList();
	}

	/**
	 * Parses the command line and, in primary mode, the setting file.
	 * Supported invocations:
	 *   --primary <SettingFile>    read configuration from a properties file
	 *   --secondary <IPAddress...> join an existing ring via the given hosts
	 * @param args command-line arguments
	 * @return true when startup may proceed, false to abort
	 */
	private boolean getArgs(String[] args){
		boolean isEnded = false;
		String settingFile;

		if(args.length < 2){
			System.err.println("Usage: PeerSystem [--primary <SettingFile>] [--secondary <IPAddress ...>]");
			return isEnded;
		}
		// defaults
		this.blockNum = 256;
		this.blockSize = 1048576;
		this.directory = "./faredata/";
		this.replication = 1;
		this.virtualNodes = 1;
		this.rankType = 0;
		this.beaconInterval = 30;
		this.beaconThreshold = 1;
		this.backupInterval = 0;

		// local
		ArrayList<String> contentName = new ArrayList<String>();
		ArrayList<String> appendixName = new ArrayList<String>();
		int count = 0;

		if(args.length > 1 && args[0].equals("--primary")){
			settingFile = args[1];

			BufferedReader br = null;
			try{
				br = new BufferedReader(new FileReader(new File(settingFile)));
				// properties
				Properties props = new Properties();
				String propval = null;
				props.load(br);

				if((propval = props.getProperty("Directory")) != null) directory = propval;
				// BUGFIX: String.concat() returns a new string; the old code
				// discarded the result so the trailing slash was never added.
				if(!directory.endsWith("/")) directory = directory + "/";
				// mkdirs() also creates missing parent directories.
				if(!(new File(directory).exists())) new File(directory).mkdirs();
				// logger
				LogParser.setParser(Level.ALL.getName(), directory);

				if((propval = props.getProperty("BlockNum")) != null) blockNum = Integer.parseInt(propval);
				if((propval = props.getProperty("BlockSize")) != null) blockSize = Integer.parseInt(propval);
				if((propval = props.getProperty("PeerMode")) != null)
					if(propval.equalsIgnoreCase("on")) replication = 3;
					else if(propval.equalsIgnoreCase("off")) replication = 1;
				if((propval = props.getProperty("ClientLog")) != null)
					if(propval.equalsIgnoreCase("on")) clientLog = true;
					else if(propval.equalsIgnoreCase("off")) clientLog = false;
				// An explicit Replication value overrides PeerMode.
				if((propval = props.getProperty("Replication")) != null)
					replication = Integer.parseInt(propval);
				if((propval = props.getProperty("IndexPort")) != null) indexport = Integer.parseInt(propval);
				if((propval = props.getProperty("SearchPort")) != null)
					searchport = Integer.parseInt(propval);
				if((propval = props.getProperty("PeerPort")) != null) peerport = Integer.parseInt(propval);
				if((propval = props.getProperty("BeaconInterval")) != null)
					beaconInterval = Integer.parseInt(propval);
				if((propval = props.getProperty("BeaconThreshold")) != null)
					beaconThreshold = Integer.parseInt(propval);
				if((propval = props.getProperty("BackupInterval")) != null)
					backupInterval = Integer.parseInt(propval);
				if((propval = props.getProperty("LogLevel")) != null){
					try{
						LogParser.changeLevel(Level.parse(propval));
					}catch(Exception e){
						LogParser.warning("Specified log-level does not exist.");
					}
				}
				// Chopper class is loaded reflectively by fully-qualified name.
				if((propval = props.getProperty("Chopper")) != null){
					try{
						@SuppressWarnings("unchecked") Class<Chopper> cls = (Class<Chopper>)Class
								.forName(propval);
						Constructor<Chopper> construct = cls.getConstructor();
						chopper = construct.newInstance();
					}catch(ClassNotFoundException e){
						System.out.println("Class for chopper was not found; falling back to the default chopper.");
					}catch(Exception e){
						throw new InternalError("Internal error.");
					}
				}
				if((propval = props.getProperty("VirtualNode")) != null)
					virtualNodes = Integer.valueOf(propval);
				if((propval = props.getProperty("Content")) != null) contentName.add(propval);
				if((propval = props.getProperty("Appendix")) != null) appendixName.add(propval);
				if((propval = props.getProperty("Rank")) != null){
					isRerank = true;
					if(propval.equalsIgnoreCase("TFIDF")) rankType = Constants.RERANK_TFIDF;
					else if(propval.equalsIgnoreCase("ID")) rankType = Constants.RERANK_ID;
					else rankType = Constants.RERANK_DEFAULT;
				}
				if((propval = props.getProperty("Connection")) != null){
					// make links between contents and appendices
					if(contentInfo == null)
						contentInfo = new ContentInfo(contentName.toArray(new String[0]),
								appendixName.toArray(new String[0]));
					String[] values = propval.split(",");
					int[] indices = new int[values.length];
					for(int i = 0; i < indices.length; i++)
						indices[i] = Integer.parseInt(values[i]);
					contentInfo.connectGraph(count++, indices);
				}
				isEnded = true;
			}catch(FileNotFoundException e){
				LogParser.severe("Please input appropriate file. Shutdown.");
				return isEnded;
			}catch(IOException ignored){
				// best effort: a broken setting file leaves isEnded false
			}finally{
				if(br != null) try{
					br.close();
				}catch(IOException e){}
			}
		}
		// (the old "args.length == 0" branch was unreachable: that case
		// already returned at the usage check above, so it was removed)

		// Secondary mode
		if(args.length > 1 && args[0].equals("--secondary")){
			firstHost = new String[args.length - 1];
			for(int i = 1; i < args.length; i++)
				firstHost[i - 1] = args[i];
			// logger
			try{
				LogParser.setParser(Level.WARNING.getName(), "faredata/faredb.log");
			}catch(SecurityException e){}catch(IOException e){}

			isEnded = true;
		}

		return isEnded;
	}

	/**
	 * Entry point: parse arguments, boot the peer and hand control to the
	 * interactive console.
	 */
	public static void main(String[] args) throws InterruptedException, IOException{
		final ReplicatePeerSystem mainSystem = new ReplicatePeerSystem();
		if(mainSystem.getArgs(args)){
			mainSystem.initialize();
			mainSystem.setProperties();
			IndexableSwitch.unlock();
			new ReplicateConsole(mainSystem).console();
		}
	}

	// Wires shared state (replication factor, node list, memory manager)
	// into the static configuration of every worker class. Must run after
	// initialize() so memman, myNode and nodeList are non-null.
	private void setProperties(){
		TermIndexerHost.property(replication, myNode, nodeList);
		TermIndexerClient.property(memman);
		ContentIndexerHost.property(replication, myNode, nodeList);
		ContentIndexerClient.property(memman);
		DeleterHost.property(replication, myNode, nodeList);
		DeleterClient.property(memman);
		SearcherHost.property(replication, memman, myNode, nodeList);
		HashCompareTool.registerLists(nodeList, myNode, replication);
	}

	// Tester's startup method
	// Boots the peer with a fixed single-node test configuration.
	public void testMain(String[] args) throws InterruptedException, IOException{
		// Same defaults as getArgs(), except a smaller block count and a
		// dedicated test directory.
		this.blockNum = 32;
		this.blockSize = 1048576;
		this.directory = "./testDir/";
		this.replication = 1;
		this.virtualNodes = 1;
		this.rankType = 0;
		this.beaconInterval = 30;
		this.beaconThreshold = 1;
		this.backupInterval = 0;
		LogParser.setParser(Level.WARNING.getName(), "testDir/faredb.log");
		this.initialize();
		// Replace the chopper AFTER initialize(), which installs SenChopper.
		this.chopper = new WordBigramChopper();
		this.setProperties();
		IndexableSwitch.unlock();

		replication = 1; // NOTE(review): redundant — already set above
	}

	// Process to stop all works
	// Shuts the peer down: stops the public services and worker pools,
	// notifies every other node, then terminates the JVM.
	public void setStopper(boolean isStop){
		System.out.println("Shutting down...");
		stopFlag = isStop;
		// Stop accepting new work first.
		searchServer.stopService();
		indexServer.stopService();
		execIndex.shutdownNow();
		execSearch.shutdownNow();
		execBeacon.shutdownNow();

		// Tell all remote nodes that this node is leaving the ring.
		for(Node eachNode: nodeList)
			if(!eachNode.equals(myNode)) ReplicateCommunicator.finalizeProcess(eachNode, myNode);
		nodeList = null;
		// FIXME: Better way to stop gracefully
		Runtime.getRuntime().exit(0);
	}

	/**
	 * Accepts a new content set for indexing: the whole queue goes to the
	 * content indexer, and each content field is chopped into terms that are
	 * fed to the term indexer.
	 * @param newIndex - content set to index
	 */
	public void setNewIndex(Queue newIndex){
		ContentIndexerHost.addQueue(newIndex);
		for(int fieldIdx = 0; fieldIdx < newIndex.contents.length; fieldIdx++){
			final long chopStart = LogParser.initTime();
			final TermList chopped = chopper.chop(newIndex.contents[fieldIdx]);
			LogParser.timerFinest("Index / Chop", chopStart);
			for(Term eachTerm: chopped)
				TermIndexerHost.addToQueue(
						TermIndexee.asValue(eachTerm.str(), newIndex.ID, eachTerm.freq(), fieldIdx));
		}
	}

	// Enqueues a deletion request for the content with the given ID.
	public void setDelete(long ID){
		DeleterHost.addToQueue(ID);
	}

	// TODO: consider for multi thread searching
	/**
	 * Runs a full search: chops the query into terms, queries the
	 * responsible nodes in parallel, intersects the per-term results and
	 * fetches the matching contents for the requested page.
	 * @param newQuery - query string, target content type and paging window
	 * @return the populated result set, or null when the type name is unknown
	 * @throws Exception propagated from the search infrastructure
	 */
	public ExtendResult setNewSearch(SearchQueue newQuery) throws Exception{
		long time = System.nanoTime();
		ExtendResult result = new ExtendResult(contentInfo);
		newQuery.typenum = contentInfo.getContentIndex(newQuery.typestr);
		if(newQuery.typenum == -1){
			// BUGFIX: corrected typo ("recieve") in the user-facing message.
			System.out.println("Invalid type. We can receive: ");
			for(int i = 0; i < contentInfo.getContentNum(); i++)
				System.out.println("\t" + contentInfo.getContentName(i));
			return null;
		}

		long dbgtime = LogParser.initTime();
		TermList terms = chopper.chop(newQuery.query);
		SearcherQueue structure = SearcherQueue.newSearcher(terms, 10);
		dbgtime = LogParser.timerFinest("Search / Chop", dbgtime);

		// starts search on specified node
		for(int i = 0; i < structure.getCount(); i++)
			execSearch.execute(new SearcherHost(structure, i, newQuery.typenum));
		dbgtime = LogParser.timerFinest("Search / Sending process", dbgtime);
		// intersection if all nodes finished search
		structure.waitLatch();
		LogParser.timerFinest("Search / Search terms", dbgtime);

		if(!structure.isNotFound()){
			try{
				dbgtime = LogParser.initTime();
				// intersection
				ArrayList<HashIDExt> hashes;
				if(terms.size() > 1){
					// result by multiple terms needs intersection
					hashes = intersection(newQuery.typenum, structure);
					dbgtime = LogParser.timerFinest("Search / Intersection", dbgtime);
					result.setTotal(hashes.size());
					// clamp the requested window to the available hits
					if(newQuery.from + newQuery.length > hashes.size())
						newQuery.length = hashes.size() - newQuery.from;
					// get content
					result.setResult(getContents(hashes, newQuery.from, newQuery.length, newQuery.typenum),
							(System.nanoTime() - time) / 1000000000.0);
					dbgtime = LogParser.timerFinest("Search / Content", dbgtime);
				}else{
					// only one term need not to intersect, to get result faster
					hashes = getIndexAlone(structure.getTermHash(0), newQuery.typenum, newQuery.from,
							newQuery.length, structure.getResult(0));
					dbgtime = LogParser.timerFinest("Search / Get list", dbgtime);
					result.setTotal(structure.getResult(0).docfreq);
					// get content
					result.setResult(getContents(hashes, 0, hashes.size(), newQuery.typenum),
							(System.nanoTime() - time) / 1000000000.0);
					dbgtime = LogParser.timerFinest("Search / Content", dbgtime);
					if(structure.getResult(0).docfreq == hashes.size() && result.getLength() != hashes.size())
						result.setTotal(result.getLength());
				}

				if(isRerank) ReplicateReranker.rerank(result, rankType);
				LogParser.timerFinest("Search / Get content", dbgtime);
			}catch(Exception e){
				e.printStackTrace();
			}
		}else{
			System.out.println("Not found.");
			result.clearResult();
		}

		return result;
	}

	// size of each content list from remote node
	private static final int BUFFERSIZE = 100;

	// Get specified length of contents from local or remote.
	// Resolves the node responsible for `hash` on the ring and fetches the
	// inverted-index entries starting at fromPtr.
	// NOTE(review): `length` is only honoured for the local lookup; the
	// remote call does not forward it — confirm whether that is intended.
	// NOTE(review): the floorEntry fallback wraps to firstEntry() here but
	// getContents() wraps to lastEntry() — verify which fallback is correct.
	private LinkedHashMap<Long, Integer> getContentList(long hash, LookupPointer fromPtr, int length){
		LinkedHashMap<Long, Integer> hashMap = null;
		try{
			Entry<Long, Node> termEntry = hashList.floorEntry(Long.valueOf(hash));
			if(termEntry == null) termEntry = hashList.firstEntry();
			Node termNode = termEntry.getValue();
			// Reference comparison: myNode is the exact instance stored in
			// the ring (assumption — TODO confirm).
			if(termNode == myNode) hashMap = memman.getContentsByHash(fromPtr, length);
			else hashMap = ReplicateCommunicator.getInvertIndexList(termNode, fromPtr);

			return hashMap;
		}catch(Exception e){
			e.printStackTrace();
		}
		// Any failure is reported to the caller as null.
		return null;
	}

	// Get all content list from remote node.
	// Repeatedly fetches BUFFERSIZE-sized pieces until the pointer reaches
	// LookupPointer.EMPTY, collecting them sorted descending by key.
	// NOTE(review): fromPtr is expected to be advanced by the lookup it is
	// handed to — confirm; the loop terminates on that assumption.
	private TreeMap<Long, Integer> getAllContentList(long hash, LookupPointer fromPtr){
		// Descending order: largest hash first.
		TreeMap<Long, Integer> retList = new TreeMap<Long, Integer>(new Comparator<Long>(){
			@Override
			public int compare(Long o1, Long o2){
				return (o1.longValue() < o2.longValue() ? 1 : (o1.longValue() > o2.longValue() ? -1 : 0));
			}
		});
		LinkedHashMap<Long, Integer> pieceMap;

		while(!fromPtr.equals(LookupPointer.EMPTY)){
			pieceMap = getContentList(hash, fromPtr, BUFFERSIZE);
			// BUGFIX: a null piece means the lookup failed; stop instead of
			// spinning forever on a pointer that will never advance.
			if(pieceMap == null) break;
			retList.putAll(pieceMap);
		}

		return retList;
	}

	/**
	 * Fetches the posting list for a single term and converts the window
	 * [from, from+length) into weighted hash/ID pairs.
	 * @param termHash - hash of the term
	 * @param prior - content-type index
	 * @param from - number of leading entries to skip
	 * @param length - number of entries wanted
	 * @param result - per-term search result holding the lookup pointers
	 * @return weighted entries; empty when the lookup failed
	 */
	private ArrayList<HashIDExt> getIndexAlone(long termHash, int prior, int from, int length,
			ExtendTerm result){
		ArrayList<HashIDExt> results = new ArrayList<HashIDExt>(from + length + 1);
		result.invIndex[prior].firstID = result.firstIDs[prior];
		LinkedHashMap<Long, Integer> IDMap = getContentList(termHash, result.invIndex[prior], from + length);
		// BUGFIX: getContentList() returns null on failure; previously that
		// caused a NullPointerException at the loop below.
		if(IDMap == null) return results;
		int count = 0;
		double tfidf;

		for(Entry<Long, Integer> eachID: IDMap.entrySet()){
			// skip entries outside the requested window
			if(count < from || count >= from + length){
				count++;
				continue;
			}
			tfidf = StandardWeight.getWeight((eachID.getValue()).doubleValue(), result.docfreq,
					memman.getCounter());
			results.add(new HashIDExt(VirtualContentHash.getContentHash((eachID.getKey()).longValue()),
					(eachID.getKey()).longValue(), tfidf));
			count++;
		}

		return results;
	}

	// make a intersection among multiple index.
	// K-way merge over the per-term posting lists (each sorted descending by
	// content hash): a hash present in every list is a hit. Capped at
	// 100000 results.
	@SuppressWarnings("unchecked")
	private ArrayList<HashIDExt> intersection(int prior, SearcherQueue structure){
		ArrayList<HashIDExt> results = new ArrayList<HashIDExt>();

		int ptrlen = structure.getCount();
		long[] currentPtrs = new long[ptrlen];   // current hash of each list
		Iterator<Entry<Long, Integer>>[] iters = new Iterator[ptrlen];
		Entry<Long, Integer>[] entries = new Entry[ptrlen];
		TreeMap<Long, Integer>[] sortedEntries = new TreeMap[ptrlen];
		int[] currentNum = new int[ptrlen];
		int counter = 0;
		boolean samePtr = false;
		boolean isDone = false;
		int newestIndex;
		long newestPtr;
		long comps;
		int i;

		// order by n
		// NOTE(review): next() is called without a hasNext() check; an empty
		// posting list would throw NoSuchElementException here — presumably
		// the upstream isNotFound() check rules that out. TODO confirm.
		for(i = 0; i < ptrlen; i++){
			structure.getResult(i).invIndex[prior].firstID = structure.getResult(i).firstIDs[prior];
			sortedEntries[i] = getAllContentList(structure.getTermHash(i),
					structure.getResult(i).invIndex[prior]);
			iters[i] = sortedEntries[i].entrySet().iterator();
			entries[i] = iters[i].next();
			currentPtrs[i] = (entries[i].getKey()).longValue();
			currentNum[i] = 0;
		}

		while(counter < 100000){
			samePtr = true;
			newestIndex = 0;
			newestPtr = currentPtrs[0];
			// match whether pointer equals or not
			for(i = 1; i < ptrlen; i++){
				comps = newestPtr - currentPtrs[i];
				if(comps < 0){
					newestIndex = i;
					newestPtr = currentPtrs[i];
					samePtr = false;
				}else if(samePtr && comps > 0) samePtr = false;
			}

			if(samePtr){
				// all pointers matched
				// sum the per-term weights into a single score for the hit
				double tfidf = 0.0;
				for(i = 0; i < ptrlen; i++)
					tfidf += StandardWeight.getWeight((entries[i].getValue()).doubleValue(),
							structure.getResult(i).docfreq, memman.getCounter());

				results.add(new HashIDExt(VirtualContentHash.getContentHash(newestPtr), newestPtr, tfidf));
				counter++;
				// advance every list past the matched hash
				for(i = 0; i < ptrlen; i++){
					// no more pointer
					if(!iters[i].hasNext()){
						isDone = true;
						break;
					}
					entries[i] = iters[i].next();
					currentPtrs[i] = entries[i].getKey().longValue();
				}
			}else{
				// take pointer to next
				// only the list holding the largest hash advances
				currentNum[newestIndex]++;
				// no more pointer
				if(!iters[newestIndex].hasNext()){
					isDone = true;
					break;
				}
				entries[newestIndex] = iters[newestIndex].next();
				currentPtrs[newestIndex] = entries[newestIndex].getKey().longValue();
			}
			if(isDone) break;
		}
		return results;
	}

	// Fetches the actual content queues for the window [from, from+length)
	// of the ranked hash list, pulling each one from the responsible node.
	private Queue[] getContents(ArrayList<HashIDExt> hash, int from, int length, int priorIndex){
		Queue[] result = new Queue[(length > hash.size() ? hash.size() : length)];
		Node lookupNode;
		HashIDExt eachHash;
		int skipped = 0;

		for(int i = 0; i < hash.size(); i++){
			if(i < from || i >= from + length) continue;
			eachHash = hash.get(i);
			LogParser.finest("Lookup: " + eachHash);
			Entry<Long, Node> lookupEntry = hashList.floorEntry(Long.valueOf(eachHash.ID));
			// NOTE(review): wraps to lastEntry() here while getContentList()
			// wraps to firstEntry() — verify which fallback is intended.
			if(lookupEntry == null) lookupEntry = hashList.lastEntry();
			lookupNode = lookupEntry.getValue();
			if(lookupNode == myNode){
				result[i - from] = memman.lookupContent(eachHash.ID, priorIndex);
				// TODO: Get content accurately
				// A missing/empty local content is skipped; incrementing
				// `from` shifts the remaining results one slot to the left
				// (and widens the scan window by one).
				if(result[i - from] == null || result[i - from].equals(Queue.EMPTY_QUEUE)){
					from++;
					skipped++;
					continue;
				}
				try{
					result[i - from].score = eachHash.score;
				}catch(Exception e){
					System.out.println("Error when reading ID " + eachHash.ID + " index " + priorIndex);
				}
			}else{
				result[i - from] = ReplicateCommunicator.getContentSet(eachHash.ID, priorIndex,
						contentInfo.getContentNum(), contentInfo.getAppendixNum(), lookupNode);
				result[i - from].score = eachHash.score;
			}
		}
		// Trim the slots left empty by skipped items.
		if(skipped != 0){
			result = Arrays.copyOf(result, result.length - skipped);
			System.out.println("New length: " + result.length);
		}

		return result;
	}

	// Delegates to the memory manager to (re)initialize its in-memory state.
	public void readMemory(){
		memman.initializeMemory();
	}

	// Delegates to the memory manager to save its in-memory state.
	public void saveMemory(){
		memman.saveMemory();
	}

	// ///////////////////// Implement for P2P service //////////////////////
	/**
	 * Invokes process to index content from communicator.
	 * Enqueues the content for the client-side indexer threads started in
	 * initialize().
	 * @param queue - Informations of content
	 */
	public void invokeIndexContent(Queue queue){
		// run with less thread
		ContentIndexerClient.addToQueue(queue);
	}

	/**
	 * Invokes process to index terms from communicator.
	 * Enqueues the term for the client-side term-indexer threads started in
	 * initialize().
	 * @param str - term
	 * @param termfreq - term frequency in one content
	 * @param ID - content ID
	 * @param valueType - content's type
	 */
	public void invokeIndexTerm(String str, int termfreq, long ID, int valueType){
		// run with less thread
		TermIndexerClient.addToQueue(TermIndexee.asValue(str, ID, termfreq, valueType));
	}

	/**
	 * Invokes process to search term.
	 * Blocks while indexing is locked, then performs the term lookup.
	 * @param query - search query
	 * @param valueType - content's type
	 * @return result for search in <code>ExtendTerm</code>
	 */
	public ExtendTerm invokeSearch(String query, int valueType){
		if(!IndexableSwitch.isIndexable()){
			try{
				IndexableSwitch.waitIfLocked();
			}catch(InterruptedException e){
				// BUGFIX: restore the interrupt status instead of silently
				// swallowing the interruption.
				Thread.currentThread().interrupt();
			}
		}
		return memman.searchTerm(query, valueType);
	}

	/**
	 * Invokes process to delete a content from communicator.
	 * @param ID - ID of the content to delete
	 */
	public void invokeDelete(long ID){
		DeleterClient.addToQueue(ID);
	}

	/**
	 * Starts an asynchronous snapshot of the contents in [hashFrom, hashTo].
	 * @return a future holding the snapshotted contents
	 */
	public Future<ArrayList<Queue>> getContentSnapshot(long hashFrom, long hashTo, int valueType){
		ExecutorService snapperService = Executors.newCachedThreadPool();
		try{
			return snapperService.submit(new ContentSnapshoter(memman, hashFrom, hashTo, valueType));
		}finally{
			// BUGFIX: the pool was never shut down, leaking an executor per
			// call. shutdown() still lets the submitted task complete.
			snapperService.shutdown();
		}
	}

	/**
	 * Starts an asynchronous snapshot of the terms in [hashFrom, hashTo].
	 * @return a future holding the snapshotted terms
	 */
	public Future<ArrayList<ExtendTerm>> getTermSnapshot(long hashFrom, long hashTo, int valueType){
		ExecutorService snapperService = Executors.newCachedThreadPool();
		try{
			return snapperService.submit(new TermSnapshoter(memman, hashFrom, hashTo, valueType));
		}finally{
			// BUGFIX: the pool was never shut down, leaking an executor per
			// call. shutdown() still lets the submitted task complete.
			snapperService.shutdown();
		}
	}

	// Serves a remote peer's inverted-index page request from local memory.
	public LinkedHashMap<Long, Integer> getContentsByHash(LookupPointer ptr, int length){
		return memman.getContentsByHash(ptr, length);
	}

	/**
	 * Starts an asynchronous snapshot of the inverted-index page at ptr.
	 * @return a future holding the snapshotted index page
	 */
	public Future<LinkedHashMap<Long, Integer>> getInvIndexSnapshot(LookupPointer ptr, int length){
		ExecutorService snapperService = Executors.newCachedThreadPool();
		try{
			return snapperService.submit(new InvertIndexSnapshoter(memman, ptr, length));
		}finally{
			// BUGFIX: the pool was never shut down, leaking an executor per
			// call. shutdown() still lets the submitted task complete.
			snapperService.shutdown();
		}
	}

	/**
	 * Packs this peer's configuration into a MessagePack array so a joining
	 * node can copy it (consumed by getInformation() on the remote side).
	 * Layout: 16 fixed slots, then the content names, then the appendix names.
	 * @return the packed configuration
	 */
	public ArrayType putInfo(){
		final int contents = contentInfo.getContentNum();
		final int appendices = contentInfo.getAppendixNum();
		final MessagePackObject[] pack = new MessagePackObject[16 + contents + appendices];

		pack[0] = IntegerType.create(blockNum);
		pack[1] = IntegerType.create(blockSize);
		pack[2] = IntegerType.create(replication);
		pack[3] = IntegerType.create(0);
		pack[4] = IntegerType.create(virtualNodes);
		pack[5] = IntegerType.create(beaconInterval);
		pack[6] = IntegerType.create(beaconThreshold);
		pack[7] = IntegerType.create(backupInterval);
		// Slots 8-11 are reserved for further extensions.
		for(int slot = 8; slot <= 11; slot++)
			pack[slot] = IntegerType.create(0);
		pack[12] = IntegerType.create(LogParser.getLevel().intValue());
		pack[13] = IntegerType.create(contents);
		pack[14] = IntegerType.create(appendices);
		pack[15] = RawType.create(contentInfo.getConnection());
		for(int i = 0; i < contents; i++)
			pack[16 + i] = RawType.create(contentInfo.getContentName(i));
		for(int i = 0; i < appendices; i++)
			pack[16 + contents + i] = RawType.create(contentInfo.getAppendixName(i));

		return ArrayType.create(pack);
	}

	// Reports the current thread-pool size for the given worker type
	// (0 = index, 1 = search); -1 for an unknown type.
	// NOTE(review): despite the name, this returns the executor's pool
	// size, not a queue length — confirm callers expect that.
	public int getQueueSize(int type){
		switch(type){
		case 0:
			return ((ThreadPoolExecutor)execIndex).getPoolSize();
		case 1:
			return ((ThreadPoolExecutor)execSearch).getPoolSize();
		default:
			return -1;
		}
	}

	// Serves a remote peer's content lookup from the local memory manager.
	public Queue lookupContent(long ID, int index){
		return memman.lookupContent(ID, index);
	}

	// Get an information of host.
	// Copies cluster-wide settings from the --secondary bootstrap hosts so
	// this node matches the existing ring. No-op in primary mode.
	// NOTE(review): the loop does not break after a successful fetch, so the
	// settings of the LAST host that answers win — confirm this is intended.
	private void getInformation(){
		if(firstHost != null){
			for(String hosts: firstHost){
				ReplicateInfo repInfo = ReplicateCommunicator.getInfo(hosts);
				if(repInfo == null) continue;
				this.blockNum = repInfo.blockNum();
				this.blockSize = repInfo.blockSize();
				this.replication = repInfo.replication();
				this.contentInfo = repInfo.contentInfo();
				this.virtualNodes = repInfo.virtualNodes();
				this.beaconInterval = repInfo.beaconInterval();
				this.beaconThreshold = repInfo.beaconThreshold();
				this.backupInterval = repInfo.backupInterval();
				LogParser.changeLevel(Level.parse(Integer.toString(repInfo.logLevel())));
				// Re-derive the virtual-node hash table with the new settings.
				Node.property(virtualNodes, beaconThreshold);
				myNode.refreshHashTable();
			}
		}
	}

	// Add a node.
	// A brand-new node is appended to the chain and the ring is rebuilt;
	// an already-known node is treated as resumed from suspension.
	public void addNode(Node newNode){
		final int existingIndex = nodeList.indexOf(newNode);
		if(existingIndex >= 0){
			resume(nodeList.get(existingIndex));
		}else{
			// add to chain
			nodeList.add(newNode);
			refreshHashList();
		}
	}

	// Registers node.
	// Joins the ring via the first bootstrap host that answers: fetches the
	// node list, announces itself to every member, drops unreachable ones,
	// synchronizes clocks and finally rebuilds the hash ring.
	// TODO: node information can become inconsistent, e.g. when the chain
	// is split in two
	private void registerNode(){
		for(String host: firstHost){
			// Order to get node list
			try{
				nodeList = ReplicateCommunicator.getNodeList(host);
			}catch(Exception e){
				LogParser.warning("Node might be not found, retry in next node.");
				continue;
			}
			// Send own node information use with node list
			ArrayList<Node> notConnected = new ArrayList<Node>();
			boolean result;
			for(Node eachNode: nodeList){
				result = ReplicateCommunicator.register(eachNode.getAddr(), myNode.getAddr(),
						myNode.getPort());
				if(!result) notConnected.add(eachNode);
			}
			// Remove from list that cannot connect
			for(Node deleted: notConnected)
				nodeList.remove(deleted);

			// synchronize timings with other nodes
			synchronizeClock();
			for(Node eachNode: nodeList)
				ReplicateCommunicator.synchronizeClock(eachNode);
			// add myself
			nodeList.add(myNode);
			refreshHashList();
			break;
		}
	}

	// Calls for getting data.
	// Acquires this node's share of the data when joining the ring: either
	// reverts from a local backup (after a suspend) or copies the hash
	// ranges this node is now responsible for from neighbour nodes.
	private void callForData(){
		// If I suspended, resume data from local file
		long backupID = getSuspendInfo();
		if(backupID != -1) {
			revert(backupID);
			return;
		}

		// Copy whole data if this node does not suspended
		Entry<Long, Node> traversal = hashList.firstEntry();
		Entry<Long, Node> copyNode = hashList.higherEntry(traversal.getKey());
		if(copyNode == null) copyNode = hashList.firstEntry();
		Entry<Long, Node> hashFromEntry = hashList.lastEntry();
		Entry<Long, Node> hashToEntry = hashList.firstEntry();
		TreeMap<Long, Long> hashRanges = new TreeMap<Long, Long>();

		// Step back (replication - 1) ring entries so replica ranges are
		// covered as well.
		for(int i = 0; i < replication - 1; i++){
			hashFromEntry = hashList.lowerEntry(hashFromEntry.getKey());
			if(hashFromEntry == null) hashFromEntry = hashList.lastEntry();
		}
		long copiedRange = -1L, fromHash, toHash;
		boolean isFirst = true, behindList = true;

		// Few nodes: copy everything from the first entry that is not me.
		// NOTE(review): if every ring entry maps to myNode this loop never
		// terminates — confirm that cannot happen here.
		if(hashList.size() <= replication){
			while(true){
				System.out.println("MyNode: " + myNode);
				System.out.println("copyNode: " + copyNode.getValue());
				if(copyNode.getValue() != myNode){
					hashRanges.put(0L, -1L);
					copyItems(0L, -1L, copyNode.getValue());
					return;
				}
				copyNode = hashList.higherEntry(copyNode.getKey());
				if(copyNode == null) copyNode = hashList.firstEntry();
			}
		}
		// Walk the ring once; whenever an entry belongs to me, copy the
		// corresponding [fromHash, toHash] range from copyNode.
		for(int i = 0; i < hashList.size(); i++){
			if(traversal.getValue() == myNode){
				fromHash = hashFromEntry.getKey().longValue();
				toHash = hashToEntry.getKey().longValue();
				// avoid re-copying a range that was already fetched
				if(!isFirst){
					if(behindList){
						if(fromHash > copiedRange) fromHash = copiedRange;
					}else{
						if(fromHash < copiedRange) fromHash = copiedRange;
					}
				}
				// replicate items
				copyItems(fromHash, toHash, copyNode.getValue());
				copiedRange = toHash;
				isFirst = false;
			}
			// advance all cursors, wrapping at the end of the ring
			traversal = hashList.higherEntry(traversal.getKey());
			copyNode = hashList.higherEntry(copyNode.getKey());
			if(copyNode == null) copyNode = hashList.firstEntry();
			hashFromEntry = hashList.higherEntry(hashFromEntry.getKey());
			if(hashFromEntry == null){
				hashFromEntry = hashList.firstEntry();
				behindList = false;
			}
			hashToEntry = hashList.higherEntry(hashToEntry.getKey());
			if(hashToEntry == null) hashToEntry = hashList.firstEntry();
		}
	}

	/**
	 * Pulls every term and content whose hash lies in [hashFrom, hashTo]
	 * from another node and feeds them into the local index queues, showing
	 * a textual progress indicator per phase.
	 * @param hashFrom - lower bound of the hash range
	 * @param hashTo - upper bound of the hash range
	 * @param fromNode - node to copy from
	 */
	public void copyItems(long hashFrom, long hashTo, Node fromNode){
		int count = 0;
		// Copy terms
		System.out.printf("%nTerm range: 0x%016x - 0x%016x%n", hashFrom, hashTo);
		HashMap<String, ArrayList<LookupPointer>> termMap = ReplicateCommunicator.getTermList(fromNode,
				hashFrom, hashTo, myNode.getAddr(), myNode.getPort(), true, true);
		LinkedHashMap<Long, Integer> invIndices = null;
		for(Entry<String, ArrayList<LookupPointer>> eachTerm: termMap.entrySet()){
			for(int i = 0; i < eachTerm.getValue().size(); i++){
				invIndices = ReplicateCommunicator.getInvertIndexList(fromNode, eachTerm.getValue().get(i));
				for(Entry<Long, Integer> index: invIndices.entrySet())
					TermIndexerClient.addToQueue(TermIndexee.asValue(eachTerm.getKey(), index.getKey()
							.longValue(), index.getValue().intValue(), 0));
			}
			if(count++ % 500 == 0 || count == termMap.size())
				printProgress(count, termMap.size());
		}

		System.out.printf("%nContent range: 0x%016x - 0x%016x%n", hashFrom, hashTo);
		// BUGFIX: restart the counter for the content phase; previously the
		// term count leaked in and the indicator could exceed 100 percent.
		count = 0;
		// copy contents
		HashSet<Queue> contentSet = ReplicateCommunicator.getContentListFromNode(fromNode, hashFrom, hashTo,
				myNode.getAddr(), myNode.getPort(), contentInfo, true, true);
		for(Queue eachQueue: contentSet){
			ContentIndexerClient.addToQueue(eachQueue);
			if(count++ % 100 == 0 || count == contentSet.size())
				printProgress(count, contentSet.size());
		}
		System.out.println("\nEnd.");
	}

	// Renders a one-line textual progress indicator on stdout.
	private static void printProgress(int count, int total){
		int denom = (total == 0 ? 1 : total);
		StringBuilder sharps = new StringBuilder(" ");
		for(int j = 0; j < (count * 48 / denom); j++)
			sharps.append('#');
		while(sharps.length() <= 48)
			sharps.append(' ');
		System.out.printf("\r%3d pct.%s %8d", (count * 100 / denom), sharps, count);
	}

	// hashList needs to synchronize
	// Rebuilds the consistent-hash ring (hashList) from nodeList: re-inserts
	// every virtual hash of every node, then collapses runs of adjacent
	// entries owned by the same node to a single entry per arc.
	private void refreshHashList(){
		if(hashList == null) hashList = new ConcurrentSkipListMap<Long, Node>();
		Node currentNode;

		// Get whole hash table
		// Node is iterable over the virtual hashes it owns; a later put with
		// the same hash overwrites the earlier owner.
		for(int i = 0; i < nodeList.size(); i++){
			currentNode = nodeList.get(i);
			for(Long eachHash: currentNode)
				hashList.put(eachHash, currentNode);
		}

		// NOTE(review): firstEntry() returns null for an empty map, and
		// getKey() would then NPE -- assumes nodeList always supplies at least
		// one hash. TODO confirm with callers.
		Entry<Long, Node> currentEntry = hashList.firstEntry();
		Entry<Long, Node> nextEntry = hashList.higherEntry(currentEntry.getKey());
		// Delete the same node in a row
		for(; nextEntry != null;){
			// Every entry can compare in pointers
			// Adjacent entries with the same address: the lower one is
			// redundant, keeping only the last hash of each node's run.
			if(currentEntry.getValue().getAddr().equals(nextEntry.getValue().getAddr()))
				hashList.remove(currentEntry.getKey());
			// FIXME: High cost!
			currentEntry = hashList.higherEntry(currentEntry.getKey());
			nextEntry = hashList.higherEntry(nextEntry.getKey());
		}
		// The ring wraps: if first and last entries are the same node object,
		// drop the last so the wrap-around arc is not represented twice.
		if(hashList.size() != 1 && hashList.firstEntry().getValue() == hashList.lastEntry().getValue())
			hashList.remove(hashList.lastEntry().getKey());
	}

	// It works for deletion.
	// Re-replicates data after the node at IPaddr:port leaves the cluster,
	// then removes it from the chain and rebuilds the hash ring. Each local
	// ring entry within `replication` steps after the deleted node pulls the
	// hash arc that node used to cover.
	public void deleteReplication(String IPaddr, int port){
		Node deletedNode = Node.createTemp(IPaddr, port);
		// NOTE(review): indexOf() returning -1 would make get(-1) throw --
		// assumes callers only name nodes still present in nodeList.
		Node deletedInList = nodeList.get(nodeList.indexOf(deletedNode));
		// Stop queueing indexing work destined for the dead node.
		TermIndexerHost.nodeDead(deletedInList);
		ContentIndexerHost.nodeDead(deletedInList);
		// With replication >= cluster size every node already holds all data:
		// no copying needed, just shrink the chain and rebuild the ring.
		if(nodeList.size() <= replication){
			nodeList.remove(deletedInList);
			refreshHashList();
			return;
		}

		int count = 0;
		// Entry for traversal
		Entry<Long, Node> currentEntry = hashList.firstEntry();
		// Hash range to be copied
		Entry<Long, Node> prevFromEntry = hashList.lastEntry();
		Entry<Long, Node> prevToEntry = prevFromEntry;
		// Record deleted entry
		Entry<Long, Node> deletedEntry = null;
		// Entry to replicate
		Entry<Long, Node> repEntry = hashList.lastEntry();

		// Shift the range cursors `replication` steps behind the traversal
		// cursor so (prevFromEntry, prevToEntry] tracks the arc each visited
		// entry is responsible for.
		for(int i = 0; i < replication - 1; i++){
			prevFromEntry = hashList.lowerEntry(prevFromEntry.getKey());
			prevToEntry = hashList.lowerEntry(prevToEntry.getKey());
		}
		prevFromEntry = hashList.lowerEntry(prevFromEntry.getKey());
		currentEntry = hashList.firstEntry();
		// Walk the whole ring once; `count` counts steps since the deleted
		// node's entry was last seen.
		for(int i = 0; i < hashList.size(); i++){
			if(++count <= replication && currentEntry.getValue() == myNode){
				// Copy from the node
				if(repEntry.getValue() != myNode)
					copyItems(prevFromEntry.getKey().longValue(), prevToEntry.getKey().longValue(),
							repEntry.getValue());
			}

			// NOTE(review): deletedEntry is still null here if count reaches
			// `replication` before the deleted node's entry has been visited --
			// possible NullPointerException. TODO confirm intended ordering.
			if(count == replication) repEntry = hashList.higherEntry(deletedEntry.getKey());
			if(currentEntry.getValue().equals(deletedNode)){
				count = 0;
				deletedEntry = hashList.ceilingEntry(currentEntry.getKey());
				repEntry = hashList.lowerEntry(currentEntry.getKey());
			}

			// Advance all cursors, wrapping the range cursors around the ring.
			currentEntry = hashList.higherEntry(currentEntry.getKey());
			prevFromEntry = hashList.higherEntry(prevFromEntry.getKey());
			if(prevFromEntry == null) prevFromEntry = hashList.firstEntry();
			prevToEntry = hashList.higherEntry(prevToEntry.getKey());
			if(prevToEntry == null) prevToEntry = hashList.firstEntry();
		}

		// delete from chain
		nodeList.remove(deletedNode);
		refreshHashList();
	}

	/** Prints a one-line status summary for every node in the chain. */
	public void showNodes(){
		System.out.printf("Total node: %d, Issued hash per node: %d%n", nodeList.size(), virtualNodes);
		int position = 0;
		for(Node node: nodeList){
			position++;
			String clientState = (node.client() != null) ? "connect   " : "disconnect";
			System.out.printf("%d: Address-%s client-%s", position, node.getAddr(), clientState);
			if(node.equals(myNode)){
				String indexable = IndexableSwitch.isIndexable() ? "Yes" : "No";
				System.out.printf(" [Running] Indexable-%s%n", indexable);
			}else if(node.isSuspend()){
				System.out.printf(" [Suspend] Left: %d sec%n", node.timeLeft());
			}else{
				System.out.printf(" [Running]%n");
			}
		}
	}

	/** Prints thread-pool sizes and the residual length of each host/client queue. */
	public void showProcesses(){
		System.out.printf("Thread - Indexer/Deleter: %d, Searcher: %d%n", indexThreadNum, searchThreadNum);
		System.out.println("Queues...");
		System.out.printf("Term:     Host %4d / Client %4d%n",
				TermIndexerHost.residue(), TermIndexerClient.residue());
		System.out.printf("Contents: Host %4d / Client %4d%n",
				ContentIndexerHost.residue(), ContentIndexerClient.residue());
		System.out.printf("Delete:   Host %4d / Client %4d%n",
				DeleterHost.residue(), DeleterClient.residue());
	}

	/** @return the live node list forming the replication chain (not a copy) */
	public CopyOnWriteArrayList<Node> getNode(){
		return this.nodeList;
	}

	/** @return the content metadata holder shared with the replication layer */
	public ContentInfo contentInfo(){
		return this.contentInfo;
	}

	/** @return the indexing counter, delegated to the memory manager */
	public int getCounter(){
		return this.memman.getCounter();
	}

	/** @return the search counter, delegated to the memory manager */
	public int getSearchcounter(){
		return this.memman.getSearchcounter();
	}

	/** @return the number of terms held, delegated to the memory manager */
	public int getTerms(){
		return this.memman.getTerms();
	}

	/** @return the skip-pointer length, delegated to the memory manager */
	public int getSkipPointerLen(){
		return this.memman.getSkipPointerLen();
	}

	/** Prints the backup list; delegated to the memory manager. */
	public void showBackupList(){
		this.memman.showBackupList();
	}

	/** Reports the memory list; delegated to the memory manager. */
	public void getMemoryList(){
		this.memman.getMemoryList();
	}

	/** Runs a memory optimization pass; delegated to the memory manager. */
	public void optimizeMemory(){
		this.memman.optimizeMemory();
	}

	/** Takes an in-memory snapshot; delegated to the memory manager. */
	public void takeSnapshot(){
		this.memman.takeSnapshot();
	}

	// make backup file with specified entry ID
	/** Triggers a backup on every remote node, then locally, then clears the client behavior logs. */
	public void backup(){
		for(Node node: nodeList){
			if(node.equals(myNode)) continue;
			ReplicateCommunicator.backupLatest(node);
		}
		memman.backupLatest();
		// Clear client behavior log
		try{
			TermIndexerClient.clearLog();
			ContentIndexerClient.clearLog();
		}catch(IOException e){
			System.err.println("Logger is broken.");
		}
	}

	/**
	 * Reverts the index state from backup, then replays the client behavior
	 * logs on top of it.
	 *
	 * @param entry backup entry ID to restore, or {@code -1L} to revert every
	 *              node in the cluster (remotes first, then locally) to its
	 *              latest backup
	 */
	public void revert(long entry){
		if(entry == -1L){
			for(Node nodes: nodeList)
				if(!nodes.equals(myNode)) ReplicateCommunicator.revertLatest(nodes);
			memman.revertLatest();
		}else memman.revertByEntry(entry);
		// Revert behavior logs from file
		try{
			TermIndexerClient.revert();
			ContentIndexerClient.revert();
		}catch(FileNotFoundException e){
			// BUGFIX: corrected the ungrammatical message "Log file did not found."
			System.err.println("Log file was not found. Data might be lost.");
		}catch(IOException e){
			System.err.println("Log file is broken. Data might be lost.");
		}
	}

	// Synchronize timings to emit beacon and backup
	/** Aligns the beacon thread and the backup timer to the current moment. */
	public void synchronizeClock(){
		long now = System.currentTimeMillis();
		System.out.println("Synchronize: " + now);
		beaconThread.interrupt();
		memman.synchronizeClock();
	}

	// Suspend specified node
	/**
	 * Marks the node identified by {@code IPaddr:port} as suspended for
	 * {@code suspendTime}. Unknown nodes are silently ignored, mirroring
	 * {@link #resume(Node)}.
	 *
	 * @param IPaddr      address of the node to suspend
	 * @param port        port of the node to suspend
	 * @param suspendTime suspension duration, as interpreted by Node.suspend()
	 */
	public void suspend(String IPaddr, int port, long suspendTime){
		Node tempNode = Node.createTemp(IPaddr, port);
		int nodeIndex = nodeList.indexOf(tempNode);
		// BUGFIX: indexOf() returns -1 for an unknown/removed node and get(-1)
		// would throw IndexOutOfBoundsException; resume() already guards this.
		if(nodeIndex == -1) return;
		Node nodeFromList = nodeList.get(nodeIndex);
		nodeFromList.suspend(suspendTime);
	}

	// Writes ".suspendInfo": suspend start (millis), suspend length (seconds),
	// and the latest backup index -- three fixed-width 8-byte longs, read back
	// by getSuspendInfo().
	private void issueSuspendInfo(){
		File infoFile = new File(".suspendInfo");
		if(infoFile.exists()) infoFile.delete();
		try{
			infoFile.createNewFile();
		}catch(IOException e){
			System.err.println("Suspend file not issued due to interval error.");
			LogParser.fine("Error: While creating file \".suspendInfo\", IOException");
		}

		FileOutputStream stream = null;
		try{
			stream = new FileOutputStream(infoFile);
			stream.write(ByteConverter.longToByte(System.currentTimeMillis(), 8));
			stream.write(ByteConverter.longToByte(20 * 60, 8)); // suspend window: 20 minutes, in seconds
			stream.write(ByteConverter.longToByte(memman.latestBackupIndex(), 8));
		}catch(IOException e){
			// BUGFIX: failures were silently swallowed; stay best-effort but
			// report them like the createNewFile() path above.
			// (FileNotFoundException is an IOException, so one catch suffices.)
			System.err.println("Suspend info not written: " + e);
			LogParser.fine("Error: While writing file \".suspendInfo\", IOException");
		}finally{
			if(stream != null) try{
				stream.close();
			}catch(IOException e){}
		}
	}

	// Output backup index
	/**
	 * Reads ".suspendInfo" (written by issueSuspendInfo()) and, if the
	 * recorded suspension window is still open, returns the backup index
	 * stored there.
	 *
	 * @return the recorded backup index, or -1 if the file is missing,
	 *         truncated, unreadable, or the suspension window has expired
	 */
	private long getSuspendInfo(){
		File infoFile = new File(".suspendInfo");
		if(!infoFile.exists()) return -1;
		byte[] readbyte = new byte[8];
		long suspendTime, suspendLength;
		long ID = -1;
		FileInputStream stream = null;
		try{
			stream = new FileInputStream(infoFile);
			// BUGFIX: verify each 8-byte field is read in full; a short read
			// previously went unnoticed and decoded stale buffer contents.
			if(stream.read(readbyte) != 8) return -1;
			suspendTime = ByteConverter.byteToLong(readbyte);
			if(stream.read(readbyte) != 8) return -1;
			suspendLength = ByteConverter.byteToLong(readbyte);
			// suspendLength is stored in seconds; compare against wall clock.
			if(suspendTime + (suspendLength * 1000) >= System.currentTimeMillis()){
				if(stream.read(readbyte) == 8) ID = ByteConverter.byteToLong(readbyte);
			}
		}catch(IOException e){
			// BUGFIX: was an empty catch; remain best-effort but report it.
			System.err.println("Suspend info not readable: " + e);
		}finally{
			if(stream != null) try{
				stream.close();
			}catch(IOException e){}
		}
		return ID;
	}

	// Suspend my node
	/** Backs up local data, records suspend info to disk, and notifies every other node. */
	public void suspendMyNode(long suspendTime){
		// Backup my datas
		memman.backupLatest();
		issueSuspendInfo();
		for(Node node: nodeList)
			if(node != myNode) ReplicateCommunicator.suspend(node, myNode, suspendTime);
	}

	/** Clears the suspend flag on {@code node} and resumes its queued index work. */
	public void resume(Node node){
		int position = nodeList.indexOf(node);
		// There likely to be deleted node -- nothing to resume in that case
		if(position < 0) return;
		Node target = nodeList.get(position);
		target.resume();
		TermIndexerHost.resumeQueue(target);
		ContentIndexerHost.resumeQueue(target);
	}
}
