package edu.gatech.ors.mrsim.tests.simpleHDFS;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Random;

import edu.gatech.ors.mrsim.core.Cluster;
import edu.gatech.ors.mrsim.core.ClusterCreator;
import edu.gatech.ors.mrsim.core.Event;
import edu.gatech.ors.mrsim.core.Node;
import edu.gatech.ors.mrsim.core.SimFile;
import edu.gatech.ors.mrsim.core.SimJob;
import edu.gatech.ors.mrsim.core.Simulation;
import edu.gatech.ors.mrsim.core.impl.FileRequest;
import edu.gatech.ors.mrsim.core.impl.FileUserNode;
import edu.gatech.ors.mrsim.core.impl.GenericNode;
import edu.gatech.ors.mrsim.core.impl.HDFSDataNode;
import edu.gatech.ors.mrsim.core.impl.HDFSNameNode;
import edu.gatech.ors.mrsim.io.conf.Configuration;

/**
 * Assignment-two scenario: builds a small simulated HDFS cluster (4 file-user
 * nodes, 2 name nodes, 4 data nodes, 1 generic host) and seeds it with ten
 * random read/write file requests.
 *
 * <p>Implements both {@link SimJob} (event generation) and
 * {@link ClusterCreator} (topology construction) for the simulator.
 */
public class AssignmentTwo implements SimJob, ClusterCreator {

	/** The four files pre-loaded into the simulated HDFS. */
	private SimFile[] filesStored = { new SimFile("file1", 0, 64, 16), new SimFile("file2", 10, 192, 64),
			new SimFile("file3", 15, 64, 16), new SimFile("file4", 7, 192, 64) };
	/** storingNode[f] = names of the two data nodes that hold filesStored[f] (1-based node names). */
	private String[][] storingNode = { { "DataNode1", "DataNode2" }, { "DataNode2", "DataNode3" },
			{ "DataNode3", "DataNode4" }, { "DataNode1", "DataNode4" } };
	/**
	 * nodeToFile[n] = indices into filesStored held by "DataNode" + (n + 1).
	 * This is the inverse of storingNode: e.g. nodeToFile[0] = {0, 3} because
	 * DataNode1 appears in the storingNode rows for file1 and file4.
	 */
	private int[][] nodeToFile = { { 0, 3 }, { 0, 1 }, { 1, 2 }, { 2, 3 } };
	// Unseeded: each run produces a different workload. Seed here if
	// reproducible simulations are needed.
	private Random rand = new Random();

	/**
	 * Generates ten random file requests (roughly half reads, half writes),
	 * each targeting a random FileUserNode at a random time in [0, 15).
	 *
	 * @param config simulator configuration (unused by this job)
	 * @param forSim the running simulation, used to look up user nodes
	 * @return the initial events to schedule
	 */
	@Override
	public List<Event<?>> getInitEvents(Configuration config, Simulation forSim) {
		ArrayList<Event<?>> events = new ArrayList<Event<?>>();
		for (int i = 0; i < 10; i++) {
			FileUserNode node = (FileUserNode) forSim.getCluster().getNode("FileUserNode" + rand.nextInt(4));
			boolean write = rand.nextBoolean();
			if (write) {
				// Writes create brand-new files (newFile5 ... newFile14) so
				// they never collide with the pre-loaded file1..file4.
				String name = "newFile" + (5 + i);
				int blocks = 16 + rand.nextInt(48);
				SimFile toWrite = new SimFile(name, 0, blocks, 5);
				node.addFileToWrite(toWrite);
				events.add(new FileRequest(rand.nextInt(15), node, toWrite.getName(), true));
			} else {
				// Reads target one of the files already stored in the cluster.
				SimFile file = filesStored[rand.nextInt(filesStored.length)];
				node.addFileToRetrive(file.getName());
				events.add(new FileRequest(rand.nextInt(15), node, file.getName()));
			}
		}
		return events;
	}

	@Override
	public boolean eventsSuccess() {
		return true;
	}

	/**
	 * Builds the 11-node cluster: FileUserNode0-3, NameNode0-1 (each tracking
	 * the locations of two files), DataNode1-4 (each holding two files per
	 * nodeToFile), and one generic HDFS host node.
	 *
	 * @param config simulator configuration (unused by this creator)
	 * @return the assembled cluster
	 */
	@Override
	public Cluster createCluster(Configuration config) {
		HashMap<String, Node> nodes = new HashMap<String, Node>();
		int count = 0;
		for (int i = 0; i < 11; i++) {
			String name = null;
			Node node = null;
			if (i < 4) {
				name = "FileUserNode" + (count++);
				node = new FileUserNode(name);
			} else if (i < 6) {
				if (i == 4)
					count = 0; // restart numbering for the name nodes
				name = "NameNode" + (count++);
				node = new HDFSNameNode(name);
				// NameNode0 tracks files 0-1, NameNode1 tracks files 2-3.
				int fileOne = (i - 4) * 2;
				int fileTwo = (i - 4) * 2 + 1;
				((HDFSNameNode) node).addFileLocations(filesStored[fileOne].getName(), storingNode[fileOne]);
				((HDFSNameNode) node).addFileLocations(filesStored[fileTwo].getName(), storingNode[fileTwo]);
			} else if (i < 10) {
				int nodeNum = i - 6;
				HashMap<String, SimFile> files = new HashMap<String, SimFile>();
				for (int f = 0; f < nodeToFile[nodeNum].length; f++) {
					int filenum = nodeToFile[nodeNum][f];
					files.put(filesStored[filenum].getName(), filesStored[filenum]);
				}
				// BUG FIX: data nodes must be registered as DataNode1..DataNode4
				// (1-based) to match the names in storingNode that the name
				// nodes hand out; previously they were DataNode0..DataNode3,
				// so every location lookup pointed at nonexistent nodes.
				name = "DataNode" + (nodeNum + 1);
				node = new HDFSDataNode(name, 2, 0, files);
			} else {
				name = "HDFS host node";
				node = new GenericNode(name);
			}
			nodes.put(name, node);
		}
		return new Cluster(nodes);
	}

	@Override
	public boolean clusterSuccess() {
		return true;
	}

}
