package cgl.granules.application;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.Enumeration;
import java.util.Properties;
import java.util.LinkedList;

import cgl.granules.dataset.Dataset;
import cgl.granules.dataset.DatasetCollection;
import cgl.granules.dataset.DatasetException;
import cgl.granules.dataset.DatasetFactory;
import cgl.granules.dataset.FileAccess;
import cgl.granules.exception.MarshallingException;
import cgl.granules.operation.MapReduceBase;
import cgl.granules.operation.MapReduceException;
import cgl.granules.results.ResultPayload;
import cgl.granules.results.Results;

/**
 * An example demonstrating the map role within a dummy pipeline operation.
 * 
 * @author Chao Sun
 * 
 */
public class DummyMap extends MapReduceBase {
	/** Shared payload marshaller; initialized once in the constructor. */
	private ResultPayload resultPayload;
	/** Human-readable worker label, e.g. "Mapper Number: 3"; set in execute(). */
	String workerType = "Unknown ";

	/**
	 * Registers this operation with the framework: domain, version and a
	 * unique operation identifier for the dummy pipeline sample.
	 */
	public DummyMap() {
		setDomain("Generic");
		setVersionInformation(200);
		setOperationIdentifier("DummyPipeline-MapFunction-GranulesSample");
		resultPayload = ResultPayload.getInstance();
	}

	/**
	 * Map role entry point. Reads every file in the configured FILES dataset,
	 * publishes the file contents as a single marshalled String[] result, and
	 * signals termination. Returns early (with a console message) if the
	 * "Mapper Number" directive or the dataset is missing or mis-typed.
	 */
	public void execute() {
		Properties props = getProcessingDirectives();

		String property = "Mapper Number";
		if (props.containsKey(property)) {
			workerType = property + ": " + props.getProperty(property);
		} else {
			System.out.println("Mapper info incomplete! exiting ...");
			return;
		}

		String mappernum = props.getProperty(property);

		if (!hasDatasetCollection()) {
			System.out
					.println("No dataset has been initialized. Returning ...");
			return;
		}

		String datasetIdentifier = "Dummy-MapReduce-Pipeline";
		DatasetCollection datasetCollection = getDatasetCollection();

		try {
			Dataset dataset = datasetCollection.getDataset(datasetIdentifier);

			// Only FILES datasets are supported by this sample; anything else
			// is reported and the operation bails out.
			if (dataset.getDatasetType() != Dataset.FILES) {
				System.out.println("Incorrect dataset: "
						+ dataset.getDatasetType()
						+ " initialized. Returning ...");
				return;
			}
			FileAccess access = (FileAccess) dataset;

			// Slurp every file in the dataset; order follows the enumeration.
			Enumeration filenames = access.getFilenames();
			LinkedList<String> filelist = new LinkedList<String>();
			while (filenames.hasMoreElements()) {
				String filename = (String) filenames.nextElement();
				filelist.add(readFile(filename));
			}

			String[] publishfile = filelist.toArray(new String[0]);

			// Marshal the file contents through the payload helper created in
			// the constructor (previously shadowed here by a local variable).
			byte[] resultPayloadBytes = resultPayload.getBytes(publishfile);

			Results results = createResults(true, true);
			results.setResultPayload(resultPayloadBytes);
			writeResults(results);

			System.out.println("Mapper [" + mappernum
					+ "] finished publishing\n");

			// Inherited framework call; the misspelling is the API's own name.
			setTeminationConditionReached();
		} catch (DatasetException e) {
			e.printStackTrace();
		} catch (IOException e) {
			System.out.println("Problems converting bytes to longs");
			e.printStackTrace();
		} catch (MarshallingException e) {
			System.out.println("Problems Marshalling: " + e.getMessage());
			e.printStackTrace();
		} catch (MapReduceException e) {
			System.out.println("Problems writing Results: " + e.getMessage());
			e.printStackTrace();
		}
	}

	/**
	 * Reads the whole file into a String, preserving line breaks as '\n'.
	 *
	 * @param filename path of the file to read
	 * @return the file contents, one '\n' appended per line
	 * @throws IOException if the file cannot be opened or read
	 */
	private String readFile(String filename) throws IOException {
		StringBuilder filecontents = new StringBuilder();
		// NOTE(review): FileReader uses the platform default charset — confirm
		// the pipeline's input files match it, or switch to an explicit
		// InputStreamReader with a Charset.
		BufferedReader in = new BufferedReader(new FileReader(filename), 65536);
		try {
			String line;
			while ((line = in.readLine()) != null) {
				filecontents.append(line).append('\n');
			}
		} finally {
			// Always release the file handle, even if readLine() throws.
			in.close();
		}
		return filecontents.toString();
	}
}