package cgl.granules.application;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Properties;
import java.util.StringTokenizer;

import cgl.granules.dataset.DatasetCollection;
import cgl.granules.dataset.DatasetException;
import cgl.granules.dataset.DatasetFactory;
import cgl.granules.dataset.FileAccess;
import cgl.granules.exception.CommunicationsException;
import cgl.granules.exception.DeploymentException;
import cgl.granules.exception.MarshallingException;
import cgl.granules.operation.MapReduceException;
import cgl.granules.operation.ProgressTracker;
import cgl.granules.operation.InstanceDeployer;

/**
 * 
 * The DummyDeployer builds a dummy pipeline using MapReduce instances,
 * without doing any conversion to the input files.
 * 
 * @author Chao Sun
 * 
 */

public class DummyDeployer extends InstanceDeployer {
	/** Tracks the progress of the most recently deployed job. */
	private ProgressTracker progressTracker;

	/**
	 * Initializes the deployer.
	 * 
	 * @param streamingProperties
	 *            The properties for streaming
	 * @throws CommunicationsException
	 *             If there are network problems during communications
	 * @throws IOException
	 *             If there are IO problems
	 * @throws MarshallingException
	 *             If there are marshalling errors
	 * @throws DeploymentException
	 *             If there are deployment related problems
	 */
	public DummyDeployer(Properties streamingProperties)
			throws CommunicationsException, IOException, MarshallingException,
			DeploymentException {
		initialize(streamingProperties);
	}

	/**
	 * This method distributes a set of files over the map instances, and
	 * distributes these maps to the reduce instances. The method requires the
	 * file names to be numbered from 0. Thus, if the files are file0.txt
	 * file1.txt etc the filename base that needs to be specified would be
	 * /location/Of/Files/file
	 * 
	 * @param reduce
	 *            The reduce instance for this map
	 * @param mapNum
	 *            The map instance number
	 * @param filenameBase
	 *            The directory in which the files are stored.
	 * @param numOfMaps
	 *            The total number of maps being deployed
	 * @param numOfFiles
	 *            The total number of files that need to be processed
	 * @return The initialized DummyMap instance.
	 * @throws DatasetException
	 *             If there are problems initializing/using the datasets
	 * @throws MapReduceException
	 *             If there are problems related to Map/Reduce roles within the
	 *             application.
	 */
	private DummyMap initializeDummyMap(DummyReduce reduce, int mapNum,
			String filenameBase, int numOfMaps, int numOfFiles)
			throws DatasetException, MapReduceException {
		DummyMap map = new DummyMap();

		// Record this map's identifier in its processing directives.
		Properties props = new Properties();
		props.put("Mapper Number", Integer.toString(mapNum));
		map.setProcessingDirectives(props);

		DatasetFactory datasetFactory = DatasetFactory.getInstance();
		FileAccess fileAccess = datasetFactory.createFileDataset(
				"Dummy-MapReduce-Pipeline", "MapReduce-Example");

		// Round-robin file assignment: file i belongs to map (i % numOfMaps).
		for (int i = 0; i < numOfFiles; i++) {
			if (i % numOfMaps == mapNum) {
				String filename = filenameBase + i + ".txt";
				System.out.println(filename + "-> assigned to MAP [" + mapNum
						+ "]");
				fileAccess.addFile(filename, "rw");
			}
		}

		DatasetCollection datasetCollection = map.getDatasetCollection();
		datasetCollection.addDataset(fileAccess);

		map.addReduce(reduce);

		return map;
	}

	/**
	 * This method creates a reduce instance
	 * 
	 * @param reducenum
	 *            The identifier number of this reduce
	 * @return The initialized DummyReduce instance.
	 * @throws DatasetException
	 *             If there are problems initializing/using the datasets
	 * @throws MapReduceException
	 *             If there are problems related to Map/Reduce roles within the
	 *             application.
	 */
	private DummyReduce initializeDummyReduce(int reducenum)
			throws DatasetException, MapReduceException {
		DummyReduce reduce = new DummyReduce();

		// Record this reduce's identifier in its processing directives.
		Properties props = new Properties();
		props.put("Reducer Number", Integer.toString(reducenum));
		reduce.setProcessingDirectives(props);

		// Fire the reduce as soon as data arrives from any of its maps.
		reduce.setAsExecuteWhenDataAvailable();

		return reduce;
	}

	/**
	 * Prepares a set of Map-Reduce instances for the example pipeline and
	 * deploys it on the set of available resources.
	 * 
	 * @param numOfMaps
	 *            The total number of maps being deployed; must be positive
	 * @param numOfReduces
	 *            The total number of reduces being deployed; must be positive
	 * @param filenameBase
	 *            The directory in which the files are stored.
	 * @param numOfFiles
	 *            The total number of files that need to be processed
	 * @throws DatasetException
	 *             If there are problems initializing the datasets
	 * @throws MapReduceException
	 *             If there is a MapReduceExcecution because of problems when
	 *             setting up the roles.
	 * @throws IOException
	 *             If there are IO problems.
	 * @throws CommunicationsException
	 *             If there are network problems during communications
	 * @throws DeploymentException
	 *             If there are deployment related problems
	 * @throws MarshallingException
	 *             If there are marshalling problems.
	 * @throws InterruptedException
	 *             If the thread is interrupted while waiting between the
	 *             reduce and map deployments
	 */
	public void prepareInstancesAndDeploy(int numOfMaps, int numOfReduces,
			String filenameBase, int numOfFiles) throws MapReduceException,
			CommunicationsException, DeploymentException, MarshallingException,
			DatasetException, InterruptedException {
		// Guard against i % 0 below and against deploying null map slots.
		if (numOfMaps <= 0 || numOfReduces <= 0) {
			throw new IllegalArgumentException(
					"numOfMaps and numOfReduces must be positive: maps="
							+ numOfMaps + ", reduces=" + numOfReduces);
		}

		DummyMap maps[] = new DummyMap[numOfMaps];
		DummyReduce reduces[] = new DummyReduce[numOfReduces];

		for (int i = 0; i < numOfReduces; i++) {
			reduces[i] = initializeDummyReduce(i);
		}

		// Round-robin binding: map i feeds reduce (i % numOfReduces).
		// The index is computed directly instead of scanning all reduces
		// for the one matching j == i % numOfReduces.
		for (int i = 0; i < numOfMaps; i++) {
			int j = i % numOfReduces;
			maps[i] = initializeDummyMap(reduces[j], i, filenameBase,
					numOfMaps, numOfFiles);
			System.out.println("Reduce [" + j + "] binding to map ["
					+ i + "]\n");
		}

		System.out.println("Initialization finished\n");
		resetTrackers();

		try {
			// Deploy the reduces first so they are ready to receive the
			// streams produced by the maps.
			progressTracker = deployOperations(reduces);
			String jobId = progressTracker.getIdentifier();

			// Give the reduces a moment to come up before the maps start.
			Thread.sleep(1000);

			deployOperations(maps, jobId);
		} catch (InterruptedException e) {
			// Restore the interrupt status and propagate: this method
			// declares InterruptedException, so callers expect to see it
			// rather than having it silently swallowed.
			Thread.currentThread().interrupt();
			throw e;
		} catch (Exception e) {
			// Best-effort: report deployment failures without aborting the
			// interactive session driving this deployer.
			e.printStackTrace();
		}
	}

	/**
	 * Interactive entry point. Reads commands (submit/status/help) from
	 * standard input and drives the deployer.
	 * 
	 * @param args
	 *            args[0] is the broker hostname, args[1] the broker port
	 */
	public static void main(String[] args) {
		String module = "DummyDeployer.main()->";

		if (args.length < 2) {
			// NOTE: usage string corrected to the actual package/class name.
			System.out.println("Usage: java cgl.granules.application."
					+ "DummyDeployer <nbhostname> <nbportnum>");
			// Missing arguments is an error: exit with a non-zero status.
			System.exit(1);
		}

		String hostname = args[0];
		String portnum = args[1];
		int numOfMaps = 2;
		int numOfFiles = 2;
		int numOfReduces = 1;
		String filenameBase = "/globalhome/chaosun/test/inputfiles/file";

		Properties streamingProperties = new Properties();
		streamingProperties.put("hostname", hostname);
		streamingProperties.put("portnum", portnum);

		// The third submit argument is the number of FILES (it was
		// previously mis-documented as <num_of_resources>).
		String helpInfo = "\nThis is the dummy MapReduce pipeline test program that uses Granules."
				+ "The current list of commands include:\n"
				+ "submit <num_of_maps> <num_of_reduces> <num_of_files> <filename_base>\n"
				+ "status        : Lists the status of the all Jobs\n"
				+ "help          : List the set of commands\n";

		// try-with-resources ensures the reader is closed on every exit path.
		try (BufferedReader commandLine = new BufferedReader(
				new InputStreamReader(System.in))) {
			DummyDeployer deployer = new DummyDeployer(streamingProperties);
			StringTokenizer tok;

			System.out.println(helpInfo);

			while (true) {
				String s = commandLine.readLine();
				if (s == null) {
					// End of input stream: terminate the command loop.
					System.out.println(module + "String is null!!!");
					break;
				}

				if (s.startsWith("submit")) {
					System.out.println(module
							+ "Processing the [submit] command");

					tok = new StringTokenizer(s, " ");
					int numOfTokens = tok.countTokens();
					tok.nextToken();/* skip the command */

					if (numOfTokens > 1) {
						numOfMaps = Integer.parseInt(tok.nextToken());
					}

					if (numOfTokens > 2) {
						numOfReduces = Integer.parseInt(tok.nextToken());
						// Never deploy more reduces than maps.
						if (numOfReduces > numOfMaps) {
							numOfReduces = numOfMaps;
						}
					}

					if (numOfTokens > 3) {
						numOfFiles = Integer.parseInt(tok.nextToken());
					}

					if (numOfTokens > 4) {
						filenameBase = tok.nextToken();
					}

					String params = "\nNumber of Maps     : " + numOfMaps
							+ "\nNumber of Reduces  : " + numOfReduces
							+ "\nNumber of Files    : " + numOfFiles
							+ "\nFilename Base      : " + filenameBase + "\n";
					System.out.println(params);

					deployer.prepareInstancesAndDeploy(numOfMaps, numOfReduces,
							filenameBase, numOfFiles);

					continue;
				}

				if (s.startsWith("status")) {
					System.out.println(deployer.getStatus());
					continue;
				}

				if (s.startsWith("help")) {
					System.out.println(helpInfo);
					continue;
				}
			}

		} catch (Exception e) {
			System.out.println(e.getMessage());
			e.printStackTrace();
		}
	}
}