package clustersimulator;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;

import joptsimple.OptionParser;
import joptsimple.OptionSpec;
import joptsimple.OptionSet;
import au.com.bytecode.opencsv.CSV;
import au.com.bytecode.opencsv.CSVWriter;
import au.com.bytecode.opencsv.CSVReader;
import net.sf.javaml.core.Dataset;
import net.sf.javaml.core.DefaultDataset;
import net.sf.javaml.core.Instance;
import net.sf.javaml.core.DenseInstance;

public class ClusterSimulator {

	/**
	 * Entry point. Simulates a network of processing clusters serving a randomly
	 * generated job stream for a fixed wall-clock duration, then writes summary
	 * statistics (response latency, idle node cycles, queue depth over time) to a
	 * CSV report.
	 *
	 * Cluster topology comes from one of two places:
	 * either a user-supplied {@code --clusterFile} CSV whose rows are
	 * {@code [nodeCount, centroidX, centroidY]}, or a k-medoids clustering of a
	 * synthetic 20x20 mesh of nodes.
	 *
	 * @param args command-line options; every option has a default, so the
	 *             simulator runs with no arguments
	 * @throws IOException if the cluster file cannot be read or the report
	 *                     cannot be written
	 */
	public static void main(String[] args) throws IOException {

		// All options are optional and defaulted so a bare invocation works.
		OptionParser parser = new OptionParser();
		OptionSpec<Integer> minJobs = parser.accepts( "minJobs" ).withOptionalArg().ofType( Integer.class ).defaultsTo(0);
		OptionSpec<Integer> maxJobs = parser.accepts( "maxJobs" ).withOptionalArg().ofType( Integer.class ).defaultsTo(8);
		OptionSpec<Integer> minProcessing = parser.accepts( "minProcessing" ).withOptionalArg().ofType( Integer.class ).defaultsTo(1);
		OptionSpec<Integer> maxProcessing = parser.accepts( "maxProcessing" ).withOptionalArg().ofType( Integer.class ).defaultsTo(16);
		OptionSpec<Integer> minADOC = parser.accepts( "minADOC" ).withOptionalArg().ofType( Integer.class ).defaultsTo(1);
		OptionSpec<Integer> maxADOC = parser.accepts( "maxADOC" ).withOptionalArg().ofType( Integer.class ).defaultsTo(8);
		OptionSpec<Integer> queueSize = parser.accepts( "queueSize" ).withOptionalArg().ofType( Integer.class ).defaultsTo(128);
		OptionSpec<Integer> time = parser.accepts( "time" ).withOptionalArg().ofType( Integer.class ).defaultsTo(300000);
		OptionSpec<Integer> sample = parser.accepts( "sample" ).withOptionalArg().ofType( Integer.class ).defaultsTo(10000);
		OptionSpec<String> output = parser.accepts( "output" ).withOptionalArg().defaultsTo("output.csv");
		OptionSpec<String> clusterFile = parser.accepts( "clusterFile" ).withOptionalArg();

		OptionSet options = parser.parse(args);

		CSV csv = CSV.separator(',').quote('"').create();

		Dataset mesh = generateMesh(20);
		Dataset[] clusters;
		Instance[] centroids;

		if (options.hasArgument(clusterFile)) {
			// User-supplied topology: one cluster per CSV row.
			CSVReader infile = csv.reader(options.valueOf(clusterFile));
			List<String[]> clusterBuffer;
			try {
				clusterBuffer = infile.readAll();
			} finally {
				infile.close(); // FIX: reader was previously never closed
			}
			clusters = new Dataset[clusterBuffer.size()];
			centroids = new Instance[clusterBuffer.size()];
			int i = 0;
			for (String[] clusterString : clusterBuffer) {
				// For now we create dummy nodes at the origin; later the
				// cluster file will include real node coordinates.
				clusters[i] = new DefaultDataset();
				for (int j = 0; j < Integer.parseInt(clusterString[0]); j++)
					clusters[i].add(new DenseInstance(new double[] {0.0, 0.0}));
				centroids[i] = new DenseInstance(new double[] {Double.parseDouble(clusterString[1]),
																Double.parseDouble(clusterString[2])});
				i++;
			}
		} else {
			// No cluster file: derive clusters from the synthetic mesh.
			CustomClusterer km = new CustomKMedoids();
			clusters = km.cluster(mesh);
			centroids = km.getCentroids();
		}

		// One ProcessingCluster per discovered cluster; all share the
		// finishedJobs sink so latency can be tallied afterwards.
		ProcessingCluster[] procClusters = new ProcessingCluster[clusters.length];
		ArrayList<Job> finishedJobs = new ArrayList<Job>(128);

		for (int i = 0; i < clusters.length; i++) {
			procClusters[i] = new ProcessingCluster(clusters[i].size(), centroids[i], finishedJobs);
		}
		ArrayBlockingQueue<Job> jobQueue = new ArrayBlockingQueue<Job>(options.valueOf( queueSize ));
		JobGenerator generator = new JobGenerator(options.valueOf( minJobs ),
												  options.valueOf( maxJobs ),
												  options.valueOf( minProcessing ),
												  options.valueOf( maxProcessing ),
												  options.valueOf( minADOC ),
												  options.valueOf( maxADOC ));
		NetworkProcessor netCPU = new NetworkProcessor(procClusters);

		// Simulation loop: `clock` counts simulated cycles while wall-clock
		// time bounds the run. Statistics are snapshotted at each sample
		// interval boundary.
		long clock = 0;
		Job[] jobBuffer;
		int sampleSize = options.valueOf(time) / options.valueOf(sample);
		long[] idleByTime = new long[sampleSize];
		int[] queueByTime = new int[sampleSize];
		// FIX: track sampled intervals explicitly instead of treating
		// idleByTime[i] == 0 as "not yet sampled" — a genuine idle count of 0
		// would otherwise cause the slot to be re-sampled at the wrong time.
		boolean[] sampled = new boolean[sampleSize];
		int[] intervals = new int[sampleSize];
		long t = System.currentTimeMillis();
		for (int i = 0; i < sampleSize; i++)
			intervals[i] = options.valueOf(sample) * (i + 1);
		long end = t + options.valueOf(time);
		while (System.currentTimeMillis() < end)
			{
				if (jobQueue.remainingCapacity() > 0) {
					jobBuffer = generator.generateJobs(clock);
					for (int i = 0; i < jobBuffer.length; i++) {
						// offer() silently drops jobs that exceed remaining
						// capacity; presumably acceptable as simulated load
						// shedding — TODO confirm.
						jobQueue.offer(jobBuffer[i]);
					}
				}

				netCPU.assignJobs(jobQueue, clock);

				netCPU.processJobs(clock);

				clock++;

				// Record per-interval statistics the first time each interval
				// boundary is crossed.
				for (int i = 0; i < sampleSize; i++) {
					if (!sampled[i] && System.currentTimeMillis() > (t + intervals[i])) {
						idleByTime[i] = netCPU.totalIdleNodeCycles();
						queueByTime[i] = jobQueue.size();
						sampled[i] = true;
					}
				}
			}

		long idleNodeCycles = netCPU.totalIdleNodeCycles();

		// Response latency = time spent waiting in the queue before assignment.
		long totalResponseLatency = 0;
		for (int i = 0; i < finishedJobs.size(); i++) {
			totalResponseLatency += finishedJobs.get(i).getTimeAssigned() - finishedJobs.get(i).getTimeEnteredQueue();
		}
		// FIX: guard against division by zero — report 0 instead of NaN when
		// no job finished during the run.
		double avgResponseTime = finishedJobs.isEmpty()
				? 0.0
				: (double) totalResponseLatency / (double) finishedJobs.size();

		// Write the report; close in finally so a failed write cannot leak
		// the stream (FIX: close previously skipped on exception).
		CSVWriter outstream = csv.writer(options.valueOf( output ));
		try {
			outstream.writeNext("Total clock cycles:", Long.toString(clock));
			outstream.writeNext("Number of clusters:", Integer.toString(clusters.length));
			outstream.writeNext("Job frequency (min/max):", Integer.toString(options.valueOf( minJobs )),
														   Integer.toString(options.valueOf( maxJobs )));
			outstream.writeNext("Individual job processing time (min/max):", Integer.toString(options.valueOf( minProcessing )),
																			Integer.toString(options.valueOf( maxProcessing )));
			outstream.writeNext("Average degree of concurrency (min/max):", Integer.toString(options.valueOf( minADOC )),
																		   Integer.toString(options.valueOf( maxADOC )));
			outstream.writeNext("Average response time:", Double.toString(avgResponseTime));
			outstream.writeNext("Total idle node cycles:", Long.toString(idleNodeCycles));
			outstream.writeNext("");
			outstream.writeNext("Interval (seconds)","Idle node cycles","Queue Size");
			List<String[]> sampleOut = new ArrayList<String[]>(sampleSize);
			for (int i = 0; i < sampleSize; i++)
				sampleOut.add(i, new String[] {Integer.toString(intervals[i] / 1000),
												   Long.toString(idleByTime[i]),
												   Integer.toString(queueByTime[i])});
			outstream.writeAll(sampleOut);
		} finally {
			outstream.close();
		}
	}

	/**
	 * Builds a square mesh of nodes on an integer grid.
	 *
	 * @param row side length of the mesh; the result contains {@code row * row}
	 *            two-dimensional instances at coordinates (0,0)..(row-1,row-1)
	 * @return dataset of mesh node positions
	 */
	public static Dataset generateMesh(int row) {
		Dataset mesh = new DefaultDataset();
		for (int i = 0; i < row; i++) {
			for (int j = 0; j < row; j++) {
				mesh.add(new DenseInstance(new double[] {i, j}));
			}
		}
		return mesh;
	}

}
