package m3f.io;

import java.io.BufferedOutputStream;
import java.io.DataOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.List;
import java.util.Random;

import m3f.matrix.SparseVector;

/**
 * Converts matrix files into a compact binary representation and can produce
 * a reproducibly shuffled binary copy. Each vector is serialized as an int
 * entry count followed by (int key, double value) pairs for non-zero entries.
 */
public class BinaryTransformer {
	
	/**
	 * Reads the source matrix in batches and writes every vector to
	 * {@code outputFilename} in the binary format described on the class.
	 *
	 * @param filename              source matrix file (also passed to the reader's start)
	 * @param outputFilename        destination binary file
	 * @param normalizeInputVectors whether the reader should normalize input vectors
	 */
	public void transform(String filename, String outputFilename, boolean normalizeInputVectors){
		MatrixReader reader = MatrixReaderFactory.newInstance(filename, normalizeInputVectors);
		reader.start(filename);
		int rows = reader.getMatrixRows();
		int batchSize = 1000;
		int totalBatches = (int)Math.ceil( (double)rows/(double)batchSize );
		System.out.println("Reading source file with "+rows+" vectors");
		// try-with-resources guarantees the stream is flushed and closed even
		// when an IOException is thrown mid-write (the original leaked it).
		try(DataOutputStream out = new DataOutputStream(new BufferedOutputStream(
		              new FileOutputStream(outputFilename)))){
			for(int k = 0; k < totalBatches; k++){
				for(SparseVector v: reader.readVectors(batchSize)){
					// Delegate to the shared serializer so the on-disk format
					// is defined in exactly one place.
					writeVector(out, v);
				}
			}
		}catch(IOException ex){
			ex.printStackTrace();
		}
	}
	
	/**
	 * Writes a shuffled binary copy of the input matrix. Row order is a
	 * shuffle of [0, rows) driven by {@code randomSeed}, so the output is
	 * reproducible. Rows are read in batches and buffered in a hash table
	 * until the next row in shuffled order becomes available.
	 *
	 * <p>NOTE(review): when the next needed row is far ahead of the read
	 * position, the buffer is cleared to bound memory; this relies on
	 * {@code vectorId} wrapping to 0 and the reader re-supplying those rows
	 * on a later pass — confirm the reader actually restarts, otherwise
	 * cleared rows are never written and the loop cannot terminate.
	 *
	 * @param input                 source matrix file
	 * @param output                destination binary file
	 * @param randomSeed            seed for the row-order shuffle
	 * @param normalizeInputVectors whether the reader should normalize input vectors
	 */
	public void randomizeCopy(String input, String output, long randomSeed, boolean normalizeInputVectors){
		MatrixReader reader = MatrixReaderFactory.newInstance(input, normalizeInputVectors);
		reader.start(input);
		Hashtable<Integer, SparseVector> vectors = new Hashtable<Integer, SparseVector>();
		int rows = reader.getMatrixRows();
		List<Integer> rowOrder = new ArrayList<Integer>();
		for(int k = 0; k < rows; k++){
			rowOrder.add(k);
		}
		Collections.shuffle(rowOrder, new Random(randomSeed));
		// rows/10 is 0 when rows < 10, which would make readVectors(0) return
		// nothing and spin the loop below forever — clamp to at least 1.
		int batchSize = Math.max(1, rows/10);
		// try-with-resources: close (and flush) the stream on all exit paths.
		try(DataOutputStream out = new DataOutputStream(new BufferedOutputStream(
		              new FileOutputStream(output)))){
			int vectorId = 0;      // id assigned to the next vector read from the file
			int orderPointer = 0;  // index into rowOrder of the next row to emit
			int writtenVectors = 0;
			while(writtenVectors < rows){
				// Load a batch into the hash table, keyed by original row id.
				ArrayList<SparseVector> batch = reader.readVectors(batchSize);
				for(SparseVector vec: batch){
					vectors.put(vectorId, vec);
					vectorId++;
				}
				// Wrap the id counter so rows re-read after a clear() regain
				// their original ids.
				if(vectorId == rows) vectorId = 0;
				// Drain: emit every buffered vector that is next in shuffled order.
				int candidate = rowOrder.get(orderPointer);
				SparseVector vec = vectors.get( candidate );
				while(vec != null){
					writeVector(out, vec);
					writtenVectors++;
					orderPointer++;
					vectors.remove(candidate);
					if(writtenVectors < rows && orderPointer < rowOrder.size()){
						candidate = rowOrder.get(orderPointer);
						vec = vectors.get( candidate );
					}else{
						vec = null;
					}
				}
				// If the next needed row is far beyond what has been read,
				// drop the buffer to bound memory (see class NOTE above).
				if(candidate > vectorId + 2*batchSize){
					vectors.clear();
				}
			}
		}catch(IOException ex){
			ex.printStackTrace();
		}
	}
	
	/**
	 * Serializes one sparse vector: an int count ({@code v.size()}) followed
	 * by (int key, double value) pairs for each non-zero entry.
	 *
	 * <p>NOTE(review): the declared count is {@code v.size()} but zero-valued
	 * entries are skipped; if {@code size()} can exceed the number of
	 * non-zeros, readers of this format will desynchronize — confirm against
	 * SparseVector's contract.
	 *
	 * @param out destination stream
	 * @param v   vector to serialize
	 * @throws IOException if a write fails
	 */
	private void writeVector(DataOutputStream out, SparseVector v) throws IOException{
		out.writeInt(v.size());
		Enumeration<Integer> keys = v.getNonZeros();
		while(keys.hasMoreElements()){
			int key = keys.nextElement();
			double value = v.get(key);
			if(value != 0.0D){
				out.writeInt(key);
				out.writeDouble(value);
			}
		}
	}
	
}
