package processing;

import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.StringTokenizer;

//GK - for pre processing the data
//GK - for pre processing the data
public class DataPreprocessing
{
	//maps original item id -> {originalID, intermediateID}
	//NOTE: left null after MapToConsecutiveIntegerValues returns (existing contract);
	//it is re-created on entry so the method can be called repeatedly
	public static HashMap<Integer, int []> mappingFromOriginalID = new HashMap<Integer, int[]>();
	//maps created intermediate id -> {originalID, intermediateID}; survives the call
	public static HashMap<Integer, int []> mappingFromIntermediateID = new HashMap<Integer, int[]>();

	//sorts each item set of the data in place so that the items are in ascending order
	public static void ReorderingDataToAscending(ArrayList<ArrayList<Integer>> data)
	{
		for (ArrayList<Integer> itemSet : data)
			Collections.sort(itemSet);
	}

	//sorts each row of the data in place so that the items are in ascending order
	public static void ReorderingDataToAscending(short[][] data)
	{
		for (int i = 0; i < data.length; i++)
			Arrays.sort(data[i]);
	}

	//maps the input item ids to consecutive integers starting from 1, in first-seen order,
	//and returns the remapped data with every row sorted ascending
	//called from within the newFPtree.inputDataSet() function
	//side effects: mappingFromIntermediateID holds the id translation after the call;
	//mappingFromOriginalID ends up null (existing contract). Both maps are re-initialised
	//on entry — previously a second call threw NullPointerException on the nulled map.
	public static short[][] MapToConsecutiveIntegerValues(ArrayList<ArrayList<Integer>> data)
	{
		//re-create/clear the static maps so repeated calls work and stale entries are dropped
		mappingFromOriginalID = new HashMap<Integer, int[]>();
		mappingFromIntermediateID.clear();

		int mappedValue = 1;
		for (ArrayList<Integer> itemSet : data)
		{
			for (Integer item : itemSet)
			{
				if(!mappingFromOriginalID.containsKey(item))
				{
					//map entry layout: [0] = original id, [1] = intermediate id
					int map[] = new int[2];
					map[0] = item;
					map[1] = mappedValue;
					mappedValue++;

					mappingFromOriginalID.put(map[0], map);
					mappingFromIntermediateID.put(map[1], map);
				}
			}
		}

		//NOTE(review): intermediate ids are cast to short below, so more than
		//Short.MAX_VALUE distinct items would silently overflow — confirm dataset size
		short outputData[][] = new short[data.size()][];

		int outputDataIndex = 0;
		for (ArrayList<Integer> itemSet : data)
		{
			outputData[outputDataIndex] = new short[itemSet.size()];
			int outputDataInnerIndex = 0;
			for (Integer item : itemSet)
			{
				outputData[outputDataIndex][outputDataInnerIndex] = (short)mappingFromOriginalID.get(item)[1];
				outputDataInnerIndex++;
			}
			outputDataIndex++;
		}

		//preserve the original contract: only the intermediate-id map survives the call
		mappingFromOriginalID = null;
		ReorderingDataToAscending(outputData);

		return outputData;
	}

	//same as the ArrayList overload, for data already in short[][] form
	public static short[][] MapToConsecutiveIntegerValues(short[][] data)
	{
		//re-create/clear the static maps so repeated calls work and stale entries are dropped
		mappingFromOriginalID = new HashMap<Integer, int[]>();
		mappingFromIntermediateID.clear();

		int mappedValue = 1;
		for (int i = 0; i < data.length; i++)
		{
			for (int j = 0; j < data[i].length; j++)
			{
				if(!mappingFromOriginalID.containsKey((int)data[i][j]))
				{
					//map entry layout: [0] = original id, [1] = intermediate id
					int map[] = new int[2];
					map[0] = data[i][j];
					map[1] = mappedValue;
					mappedValue++;
					mappingFromOriginalID.put((int)map[0], map);
					mappingFromIntermediateID.put(map[1], map);
				}
			}
		}

		short outputData[][] = new short[data.length][];

		int outputDataIndex = 0;
		for (int i = 0; i < data.length; i++)
		{
			outputData[outputDataIndex] = new short[data[i].length];
			int outputDataInnerIndex = 0;
			for (int j = 0; j < data[i].length; j++)
			{
				outputData[outputDataIndex][outputDataInnerIndex] = (short)mappingFromOriginalID.get((int)data[i][j])[1];
				outputDataInnerIndex++;
			}
			outputDataIndex++;
		}

		ReorderingDataToAscending(outputData);
		//preserve the original contract: only the intermediate-id map survives the call
		mappingFromOriginalID = null;

		return outputData;
	}
	
	//reads the cleaned, comma-separated input data file and converts it to the short[][]
	//format required by the FP tree code, remapping ids via MapToConsecutiveIntegerValues;
	//the first token of each line is discarded (presumably a record id — verify against
	//the file producer), and lines with only that single token are counted and skipped
	//returns null if the file cannot be read
	public static short[][] CreateInputFromCleanedDataFile(String fileName)
	{
		ArrayList<ArrayList<Integer>> data = new ArrayList<ArrayList<Integer>>();
		//try-with-resources so the reader (and underlying FileReader) is always closed —
		//the previous version leaked the file handle on every path
		try (BufferedReader fileInput = new BufferedReader(new FileReader(fileName)))
		{
			int counterOneItemRecords = 0;

			String line = fileInput.readLine();
			while (line != null) 
			{
				StringTokenizer dataLine = new StringTokenizer(line, ",");
	            int numberOfTokens = dataLine.countTokens();
	            if (numberOfTokens == 0) break;	//an empty line terminates the input
	            if(numberOfTokens > 1)		//not ignoring records with only one item
	            //if(numberOfTokens > 2)		//ignoring records with only one item
	            {	            
		            ArrayList<Integer> itemSet = new ArrayList<Integer>();
		            dataLine.nextToken();	//skip the leading record-id token
		            while(dataLine.hasMoreTokens())
		            	itemSet.add(Integer.parseInt(dataLine.nextToken()));
		            data.add(itemSet);
	            }
	            else
	            	counterOneItemRecords++;
	            
	            line = fileInput.readLine();
			}
			
			System.out.println("Ignored " + counterOneItemRecords + " one item transactions.");
			
			return MapToConsecutiveIntegerValues(data);
		}
		catch (FileNotFoundException e) 
		{
			e.printStackTrace();
		} 
		catch (IOException e) 
		{
			e.printStackTrace();
		}
		
	    return null;
	}
}
