package edu.vt.ece.rt.benchmark;

import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;

import edu.vt.ece.rt.quorum.ClustersManager;
import edu.vt.ece.rt.quorum.util.Names;
import edu.vt.ece.rt.util.Logger.Logger;

/**
 * Worker thread that drives one stream of benchmark transactions against a
 * {@link Benchmark} instance, choosing read vs. write operations according to
 * {@code bench.readPercent} and accumulating per-thread statistics.
 *
 * <p>NOTE(review): all counters are plain {@code int} fields. That is safe only
 * if every {@code increment*}/{@code collectStats*} call happens on this thread
 * (or a single external thread) — confirm callers; otherwise updates may be
 * lost and these should become {@code AtomicInteger}s.
 */
public class BenchmarkThread extends Thread {
	// Number of read transactions attempted by this thread.
	int reads;
	// Number of write transactions attempted by this thread.
	int writes;
	// Total aborts observed (root + inner), via incrementAborts().
	int aborts;
	// Messages sent on behalf of this thread's transactions.
	public int messagesSent;
	// Read requests issued on behalf of this thread's transactions.
	public int readRequest;
	// Commit requests issued on behalf of this thread's transactions.
	int commitRequest;
	// The benchmark workload this thread executes against.
	Benchmark bench;
	// Logical id of this worker (also used in the thread name "Bench_<id>").
	public int threadId;
	// Aborts attributed to root transactions (see collectStatsRoot).
	public int rootAborts;
	// Aborts attributed to read operations.
	public int readAborts;
	// Aborts attributed to write operations.
	public int writeAborts;
	// Aborts attributed to inner (nested) transactions (see collectStatsInner).
	public int innerAborts;

	/**
	 * Creates a benchmark worker.
	 *
	 * @param i logical thread id; also used to name the thread "Bench_&lt;i&gt;"
	 * @param b workload to execute
	 */
	public BenchmarkThread(int i, Benchmark b) {
		super("Bench_" + i);
		threadId = i;
		System.out.println("My thread id is " + i);
		bench = b;
	}

	/** Records one abort (any kind). */
	public void incrementAborts() {
		aborts++;
	}

	/** Records one root-transaction abort. */
	public void incrementRootAborts() {
		rootAborts++;
	}

	/** Records one read abort. */
	public void incrementReadAborts() {
		readAborts++;
	}

	/** Records one write abort. */
	public void incrementWriteAborts() {
		writeAborts++;
	}

	/** Records one inner (nested) transaction abort. */
	public void incrementInnerAborts() {
		innerAborts++;
	}

	/** Adds {@code m} to the sent-message counter. */
	public void incrementMessage(int m) {
		messagesSent = messagesSent + m;
	}

	/** Adds {@code m} to the read-request counter. */
	public void incrementReadRequest(int m) {
		readRequest = readRequest + m;
	}

	/** Adds {@code m} to the commit-request counter. */
	public void incrementCommitRequest(int m) {
		commitRequest = commitRequest + m;
	}

	/**
	 * Main benchmark loop: runs {@code bench.transactions} transactions,
	 * each a read with probability {@code bench.readPercent}% and otherwise
	 * a write. Exceptions from a single transaction are logged and do not
	 * stop the loop. Logs total elapsed time when done.
	 */
	@Override
	public void run() {
		Logger.debug("Started");
		Logger.debug("Started the benchmark thread");

		// Seeded from hashCode(): varies per object and per JVM run, so runs are
		// not reproducible. Seed with threadId instead if determinism is wanted.
		Random random = new Random(this.hashCode());

		int operands = bench.getOperandsCount();
		Logger.debug("Started the benchmark thread operands is " + operands);
		// Integer.getInteger reads a SYSTEM PROPERTY named Names.calls —
		// it does not parse a literal; null if the property is unset.
		Logger.debug("Calls are " + Integer.getInteger(Names.calls));
		long start = System.currentTimeMillis(); // primitive long: avoid needless boxing
		for (int i = 0; i < bench.transactions; i++) {
			// FIXME(review): ids is never populated, so shuffle is a no-op and every
			// read/write operation receives an EMPTY operand array. Presumably it
			// should hold `operands` randomly chosen keys — confirm against
			// Benchmark.readOperation/writeOperation before fixing.
			List<Object> ids = new LinkedList<Object>();
			Collections.shuffle(ids);
			boolean read = random.nextInt(100) < bench.readPercent;
			Logger.debug((read ? "R" : "W") + Arrays.toString(ids.toArray()));
			Logger.debug("start");
			try {
				if (read) {
					reads++;
					bench.readOperation(ids.toArray());
				} else {
					writes++;
					bench.writeOperation(ids.toArray());
				}
				System.err.print("."); // progress tick per completed transaction
			} catch (Exception e) {
				// Best-effort: a failed transaction is reported but does not stop the run.
				e.printStackTrace();
			}
		}
		long end = System.currentTimeMillis();
		// Elapsed time was previously computed but never reported; log it.
		Logger.debug("end, elapsed ms = " + (end - start));
	}

	/**
	 * Stats hook for an inner (nested) transaction that retried {@code i} times.
	 * Any positive retry count is recorded as one abort of each relevant kind.
	 *
	 * <p>NOTE(review): this unconditionally counts a write abort even for inner
	 * read transactions — confirm that inner transactions are always writes.
	 *
	 * @param i number of retries; {@code <= 0} means committed first try
	 */
	public void collectStatsInner(int i) {
		if (i > 0) {
			incrementAborts();
			incrementInnerAborts();
			incrementWriteAborts();
		}
	}

	/**
	 * Stats hook for a root transaction that retried {@code i} times.
	 * Any positive retry count is recorded as one abort of each relevant kind.
	 *
	 * @param i number of retries; {@code <= 0} means committed first try
	 */
	public void collectStatsRoot(int i) {
		if (i > 0) {
			incrementAborts();
			incrementRootAborts();
			incrementWriteAborts();
		}
	}

}
