package mapreduce.tools;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
//import org.apache.hadoop.io.SequenceFile.CompressionType;
//import org.apache.hadoop.io.compress.CompressionCodec;
//import org.apache.hadoop.util.ReflectionUtils;

//import com.hadoop.compression.lzo.LzoCodec;

import mapreduce.kmeans.KMeansClusteringJob;
import mapreduce.model.ClusterCenter;
import mapreduce.model.Vector;


/**
 * Converts a flat text file of numeric values (one value per line) into a
 * Hadoop {@link SequenceFile} of ({@link ClusterCenter}, {@link Vector}) pairs
 * for consumption by {@link KMeansClusteringJob}. Every {@code numOfAttr}
 * consecutive lines form one data point; each point is written keyed by a
 * shared all-zero "dummy" center.
 */
public class ConvertToStreamFile {
	// Path of the plain-text input file (one attribute value per line).
	String inputFileName;
	// Number of attributes per data point; consecutive lines are grouped in
	// chunks of this size.
	int numOfAttr;
	Configuration conf;
	// Destination SequenceFile path, taken from the k-means job constants.
	Path in;
	// All-zero center used as the key for every appended record.
	ClusterCenter center;

	/**
	 * @param inputFileName path to the text file to convert
	 * @param numOfAttr     number of lines (attributes) that make up one point
	 */
	public ConvertToStreamFile(String inputFileName, int numOfAttr) {
		this.inputFileName = inputFileName;
		this.numOfAttr = numOfAttr;
		this.conf = new Configuration();
		this.in  = new Path(KMeansClusteringJob.PATH_DATA);

		// Build the zero-vector dummy center once; it is reused as the key
		// for every record (SequenceFile.Writer serializes on each append).
		double[] centerZeroVec = new double[numOfAttr];
		Arrays.fill(centerZeroVec, 0);
		Vector tempVec = new Vector();
		tempVec.setVector(centerZeroVec);
		center = new ClusterCenter(tempVec);
	}

	/**
	 * Reads the input file line by line, parses each line as a double, and
	 * writes one SequenceFile record per {@code numOfAttr} lines.
	 *
	 * @throws RuntimeException if the total line count is not a multiple of
	 *                          {@code numOfAttr} (a trailing partial point)
	 */
	private void convert() {
		File inputFile = new File(this.inputFileName);
		double[] doubleArray = new double[this.numOfAttr];

		// The Vector (and its backing array) is reused across appends: the
		// writer serializes the record eagerly on each append() call.
		Vector vec = new Vector();
		SequenceFile.Writer dataWriter = null;
		try {
			FileSystem fs = FileSystem.get(conf);
			//CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(LzoCodec.class, conf);
			dataWriter =
				SequenceFile.createWriter(
						fs, conf, in, ClusterCenter.class, Vector.class/* , CompressionType.BLOCK, codec*/);

			// NOTE(fix): was 'byte i', which overflows for numOfAttr > 127.
			int i = 0;
			long pointsRead = 0;
			// try-with-resources closes the reader on all paths (was leaked).
			try (BufferedReader bReader = new BufferedReader(new FileReader(inputFile))) {
				String line;
				while ((line = bReader.readLine()) != null) {
					doubleArray[i] = Double.parseDouble(line);
					i++;
					// A full point has been accumulated: flush it.
					if (i == this.numOfAttr) {
						vec.setVector(doubleArray);

						dataWriter.append(center, vec);
						pointsRead++;
						i = 0;
					}
				}
			}

			// A non-zero remainder means a trailing partial point: the line
			// count was not a multiple of numOfAttr. (An empty file is valid
			// and simply converts zero points.)
			if (i != 0) {
				throw new RuntimeException(
						"Number of lines is not divided by the number of attributes!");
			}

			System.out.println("Converted " + pointsRead + " points");
		} catch (FileNotFoundException e) {
			System.err.println("File not converted!");
			e.printStackTrace();
		} catch (IOException e) {
			System.err.println("File not converted!");
			e.printStackTrace();
		} finally {
			// Close the writer on every path (success or failure) so buffered
			// SequenceFile data is flushed and the handle is released.
			if (dataWriter != null) {
				try {
					dataWriter.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
	}

	/**
	 * Entry point: {@code ConvertToStreamFile <inputFile> <numOfAttr>}.
	 */
	public static void main(String[] args) {
		// Guard against missing arguments (previously threw
		// ArrayIndexOutOfBoundsException with no explanation).
		if (args.length < 2) {
			System.err.println("Usage: ConvertToStreamFile <inputFile> <numOfAttr>");
			System.exit(1);
		}
		String inputFileName = args[0];
		int numOfAttr = Integer.parseInt(args[1]);

		ConvertToStreamFile converter = new ConvertToStreamFile(inputFileName, numOfAttr);
		converter.convert();
	}

}
