import java.util.*;
import java.io.*;

import util.MinHeap;

public class Engine
{
	// Enumerate the methods implemented
	enum QUERY_TYPE
	{
		EXHAUSTIVE,
		RANDOM_PROJECTION,
		SIMPLE_VP,
		RP_TREE
	};

	// Heap element pairing a feature (word) ID with its occurrence count.
	// Used with MinHeap in readDB() to keep the top-"dimension" most frequent words.
	class SortingElement implements Comparable<SortingElement>
	{
		public int index;	// feature / word ID
		public int value;	// occurrence count; this is the sort key

		public SortingElement(int index, int value)
		{
			this.index = index;
			this.value = value;
		}

		// Orders elements by ascending value.
		// Fixed: the previous version returned 1 for equal values, so
		// sgn(a.compareTo(b)) != -sgn(b.compareTo(a)) on ties, violating the
		// Comparable contract that ordered collections rely on.
		public int compareTo(SortingElement other)
		{
			if (value < other.value) return -1;
			if (value > other.value) return 1;
			return 0;
		}
	}

	private HashMap<ConstString, Integer> FeatureIDs;	// word -> global feature ID (IDs start at 1; assigned in readDB)
//	private HashMap<String, Integer> FeatureIDs;
	private HashMap<Integer, Integer> GlobalCounter;	// feature ID -> total occurrences across the whole database
	private Integer ID;									// next feature ID to assign while reading the database
	private int k;										// number of nearest neighbours to retrieve per query
	private int projectionDimension;					// target dimension for random projection
	private int dimension, pointLimit;			// dimension equals -1 means that it's using full text
	private HashSet<Integer> maxFreq;			// IDs of the top-"dimension" most frequent words; only used if the dimension is not -1
	private Integer[] dict;						// Is used as a map from index of dense array to Feature ID 
	private HashMap<Integer, Integer> dict_2;	// Is used as a map from Feature ID to index of dense array 
	private Point[] DataBase, Queries;			// loaded database points and query points
	private Point[] projectedDB, projectedQ;	// For Random Projection (lazily built in runAllQuery)
	private ArrayList<XMLData> rawData;			// parsed source documents backing the DataBase points
	private RPTree NNRPTree;					// Only need initialize once for the tree
	private RandomProjection RPEngine;			// Only need initialize once for the matrix and engine
	private long RPTreeMemory;					// approximate heap cost (bytes) of building the RP tree
	private long RPMemory;						// approximate heap cost (bytes) of the projection matrix + projected data
	private long qMem, qTime;					// Used to capture query time and memory usage (written by runQuery)
	private double aqMem, aqTime;				// Used to capture accumulation of query time and memory usage (written by runAllQuery)

	// Build an engine for the given target dimensionality, cap on the number of
	// database points, random-projection dimension and neighbour count k.
	// A dimension of -1 means "full text": the complete vocabulary is used and
	// no dense dictionary is allocated.
	public Engine(int dimension, int pointLimit, int projectionDimension, int k)
	{
		this.dimension = dimension;
		this.pointLimit = pointLimit;
		this.projectionDimension = projectionDimension;
		this.k = k;

		FeatureIDs = new HashMap<ConstString, Integer>();
		GlobalCounter = new HashMap<Integer, Integer>();
		dict_2 = new HashMap<Integer, Integer>();

		// The dense index -> feature-ID dictionary only exists for a fixed dimension
		if (dimension > 0)
		{
			dict = new Integer[dimension];
		}
	}
	
	// Read and parse every file under "directory", build the global vocabulary
	// (FeatureIDs), count word occurrences, and populate DataBase with one
	// term-frequency Point per document. With a fixed dimension (!= -1) the
	// vectors are restricted to the "dimension" most frequent words; otherwise
	// the full vocabulary is kept.
	public void readDB(String directory)
	{
		long curTime = System.currentTimeMillis();
		FileReader fr = new FileReader();
		fr.readFiles(directory);
		List<File> listFile = fr.getFiles();
		rawData = new ArrayList<XMLData>();
		for (int i = 0; i < listFile.size(); i++)
		{
			Parser.Parse(listFile.get(i), rawData, Configuration.numberOfPoints);
		}

		int len = rawData.size();
		if (len > pointLimit) len = pointLimit;		// Only take required number of points

		DataBase = new Point[len];
		System.out.println(len);

		// Create a HashMap of words -> wordID and one raw frequency vector per document
		ID = 1;
		ArrayList<HashMap<Object, Double>> FeatureVectors = new ArrayList<HashMap<Object, Double>>();
		for (int i = 0; i < len; i++)
		{
			if (i % 1000 == 0) System.out.println(i);	// progress indicator
			HashMap<Object, Double> FeatureVector = new HashMap<Object, Double>();
			String[] split = rawData.get(i).splitText();
			for (String item : split)
			{
				item = item.trim().toLowerCase();
				if (item.length() == 0) continue;
				ConstString key = new ConstString(item);

				// Single lookup instead of the old containsKey() + get() pair
				Integer itemID = FeatureIDs.get(key);
				if (itemID == null)
				{
					itemID = ID;
					FeatureIDs.put(key, ID++);
				}

				// Counting the total occurrence of each word
				Integer total = GlobalCounter.get(itemID);
				GlobalCounter.put(itemID, (total == null) ? 1 : total + 1);

				// Counting the occurrence within this document
				Double local = FeatureVector.get(itemID);
				FeatureVector.put(itemID, (local == null) ? 1d : local + 1d);
			}
			FeatureVectors.add(FeatureVector);
		}

		if (dimension != -1)
		{
			// Find the "dimension" most-frequent words with a size-bounded min-heap
			MinHeap kMax = new MinHeap();
			int heapSize = 0, count;
			int lowerBound = 0;		// smallest count currently retained in the heap
			for (int i = 1; i < ID; i++)
			{
				count = GlobalCounter.get(i);
				if (heapSize < dimension)
				{
					kMax.insert(new SortingElement(i, count));
					heapSize++;
					lowerBound = ((SortingElement)kMax.min()).value;
				}
				else if (count > lowerBound)
				{
					// Evict the least frequent retained word in favour of this one
					kMax.removeMin();
					kMax.insert(new SortingElement(i, count));
					lowerBound = ((SortingElement)kMax.min()).value;
				}
			}

			// Record the surviving word IDs and their dense-array positions
			maxFreq = new HashSet<Integer>();
			int counter = 0;
			while (!kMax.isEmpty())
			{
				// renamed from "ID": the old local shadowed the ID field above
				Integer wordID = ((SortingElement)kMax.removeMin()).index;
				maxFreq.add(wordID);
				dict_2.put(wordID, counter);
				dict[counter++] = wordID;
			}

			// Project into lower dimension vector (by removing the words not included in top-k words)
			for (int i = 0; i < len; i++)
			{
				HashMap<Object, Double> FeatureVector = FeatureVectors.get(i);
				HashMap<Object, Double> projectedFeatureVector = new HashMap<Object, Double>();
				// Iterate over whichever side is smaller
				if (FeatureVector.size() < dimension)
				{
					for (Object key : FeatureVector.keySet())
					{
						if (maxFreq.contains(key))
						{
							projectedFeatureVector.put(key, FeatureVector.get(key));
						}
					}
				}
				else
				{
					for (Object wordID : maxFreq)
					{
						if (FeatureVector.containsKey(wordID))
						{
							projectedFeatureVector.put(wordID, FeatureVector.get(wordID));
						}
					}
				}
				DataBase[i] = new Point(i, projectedFeatureVector, Configuration.type);
			}
		}
		else
		{
			// If fulltext dimension used, no need to find most frequent words
			System.out.println("Dimension : " + FeatureIDs.size());
			for (int i = 0; i < len; i++)
			{
				DataBase[i] = new Point(i, FeatureVectors.get(i), Configuration.type);
			}
		}

		System.out.println("Processing Time : " + (System.currentTimeMillis() - curTime) + "ms");
	}
	
	// For purpose of testing, create a feature vector of query based on input string
	// For purpose of testing, create a feature vector of query based on input string.
	// Known words reuse their database feature ID; unseen words get fresh IDs
	// starting above FeatureIDs.size().
	// Fixes two bugs in the old version:
	//  1) FeatureIDs is keyed by ConstString but was probed with a raw String,
	//     so database words were never recognised (compare readDB, which wraps
	//     the token in a ConstString first);
	//  2) the new-word branch was inverted (containsKey instead of !containsKey),
	//     so every first occurrence fell through to get() == null and all unseen
	//     words collapsed onto a single null key.
	public Point stringQuery(String q)
	{
		String[] split = q.split("[!\"#$%&'()*+,./:;<=>?\\[\\]^`{|}~\\s]");
		HashMap<Object, Double> feature = new HashMap<Object, Double>();
		HashMap<String, Integer> newFeatureIDs = new HashMap<String, Integer>();

		Integer ID = FeatureIDs.size() + 1;
		for (String item : split)
		{
			item = item.trim().toLowerCase();
			if (item.length() == 0) continue;

			ConstString key = new ConstString(item);
			Integer itemID = FeatureIDs.get(key);
			if (itemID == null)
			{
				// Word not in the database vocabulary: assign (or reuse) a local ID
				itemID = newFeatureIDs.get(item);
				if (itemID == null)
				{
					itemID = ID;
					newFeatureIDs.put(item, ID++);
				}
			}

			Double occurrence = feature.get(itemID);
			feature.put(itemID, (occurrence == null) ? 1d : occurrence + 1d);
		}

		return new Point(-1, feature, Configuration.type);
	}

	// For purpose of testing, create a feature vector of query based on average occurrence for each word
	// For purpose of testing, create a feature vector of query based on average occurrence for each word.
	// NOTE(review): despite the name, this accumulates the TOTAL occurrence of
	// each feature over the database (no division by DataBase.length) — confirm
	// whether a true average is wanted before changing the semantics.
	public Point dataAverage()
	{
		int len = FeatureIDs.size();
		// Use a primitive array: the previous Double[] started out as all-null
		// entries, so "count[...] += ..." threw a NullPointerException on first use.
		double[] count = new double[len + 1];

		for (Point p : DataBase)
		{
			Map<Object, Double> item = p.FeatureSparse();
			for (Map.Entry<Object, Double> e : item.entrySet())
			{
				count[(Integer)e.getKey()] += e.getValue();
			}
		}

		HashMap<Object, Double> feature = new HashMap<Object, Double>();
		for (int i = 1; i <= len; i++)
		{
			feature.put(i, count[i]);
		}

		return new Point(-1, feature, Configuration.type);
	}

	// For purpose of testing, create feature vectors of query read from target directory, and then store the feature on each individual file to be read later
	// For purpose of testing, create a feature vector for each query document read
	// from "directory", then store each vector in its own file (named 1..n) under
	// "targetDirectory" so readQuery()/readProcessedQuery() can load them later.
	// Words unseen during readDB() get fresh IDs above FeatureIDs.size().
	public void createQueryFromText(String directory, String targetDirectory)
	{
		FileReader fr = new FileReader();
		fr.readFiles(directory);
		List<File> listFile = fr.getFiles();
		ArrayList<XMLData> rawQuery = new ArrayList<XMLData>();
		for (int i = 0; i < listFile.size(); i++)
		{
			Parser.Parse(listFile.get(i), rawQuery, Configuration.numberOfQueries);
		}

		int len = rawQuery.size();
		// IDs for words that are not part of the database vocabulary
		HashMap<ConstString, Integer> NewFeatureIDs = new HashMap<ConstString, Integer>();
		Integer ID = FeatureIDs.size() + 1;
		for (int i = 0; i < len; i++)
		{
			XMLData temp = rawQuery.get(i);
			HashMap<Integer, Integer> FeatureVector = new HashMap<Integer, Integer>();
			String[] split = temp.splitText();
			for (String item : split)
			{
				item = item.trim().toLowerCase();
				if (item.length() == 0) continue;
				ConstString key = new ConstString(item);

				// Single lookup instead of the old containsKey() + get() pair
				Integer itemID = FeatureIDs.get(key);
				if (itemID == null)
				{
					itemID = NewFeatureIDs.get(key);
					if (itemID == null)
					{
						itemID = ID;
						NewFeatureIDs.put(key, ID++);
					}
				}

				Integer occurrence = FeatureVector.get(itemID);
				FeatureVector.put(itemID, (occurrence == null) ? 1 : occurrence + 1);
			}
			printFeatures(FeatureVector, targetDirectory + "/" + (i + 1));
		}
	}

	// Read all the processed query in the directory
	// Read all the processed query files in the directory into Queries.
	public void readQuery(String directory)
	{
		ArrayList<Point> queries = new ArrayList<Point>();
		FileReader fr = new FileReader();
		fr.readFiles(directory);
		List<File> listFile = fr.getFiles();

		for (int i = 0; i < listFile.size(); i++)
		{
			// readProcessedQuery() returns null on an IO/parse failure; skip such
			// files instead of storing a null Point (the old code kept the null,
			// which caused NullPointerExceptions later in runAllQuery()).
			Point q = readProcessedQuery(listFile.get(i).getAbsolutePath());
			if (q != null) queries.add(q);
		}
		Queries = queries.toArray(new Point[queries.size()]);
	}

	// Parse the target file to extract it's feature vector. Each filename become the ID for the point read
	// Parse the target file to extract its feature vector: one "ID<TAB>value"
	// pair per line. The file name becomes the ID for the point read.
	// Returns null when the file cannot be read or parsed.
	public Point readProcessedQuery(String fileName)
	{
		HashMap<Object, Double> Feature = new HashMap<Object, Double>();
		int ID = -1;
		BufferedReader is = null;
		try
		{
			File f = new File(fileName);
			is = new BufferedReader(new InputStreamReader(new FileInputStream(f)));

			String line = null;
			while ((line = is.readLine()) != null)
			{
				String split[] = line.split("\t");
				Integer key = Integer.parseInt(split[0]);
				Double value = Double.parseDouble(split[1]);
				// With a fixed dimension only the top-k most frequent words are kept
				if (dimension > 0)
				{
					if (maxFreq.contains(key)) Feature.put(key, value);
				}
				else
				{
					Feature.put(key, value);
				}
			}
			ID = Integer.parseInt(f.getName());
		} catch (Exception ex)
		{
			ex.printStackTrace();
			return null;
		}
		finally
		{
			// The old code never closed the reader, leaking one file handle per query
			if (is != null)
			{
				try { is.close(); } catch (IOException ignored) { /* nothing useful to do */ }
			}
		}
		return new Point(ID, Feature, Configuration.type);
	}

	// Run all the query against each of the methods. Then store their statistics on a file
	public void runAllTypeQuery()
	{
		NN.SortingElement[][][] result;
		Double[] mem, time, error;
		Double[][] maxDist;
		QUERY_TYPE[] type;
		ArrayList<QUERY_TYPE> type_temp = new ArrayList<QUERY_TYPE>();
		type_temp.add(QUERY_TYPE.EXHAUSTIVE);
		type_temp.add(QUERY_TYPE.SIMPLE_VP);
		if (dimension > 0) // Projection can't work with full text dimension
		{
			if (k == 1) type_temp.add(QUERY_TYPE.RP_TREE);			
			type_temp.add(QUERY_TYPE.RANDOM_PROJECTION);
		}
		
		type = type_temp.toArray(new QUERY_TYPE[type_temp.size()]);
		result = new NN.SortingElement[type.length][][];
		mem = new Double[type.length];
		time = new Double[type.length];
		error = new Double[type.length];
		maxDist = new Double[type.length][];
		
		for(int i = 0; i < type.length; i++)
		{
			result[i] = runAllQuery(type[i]);
			mem[i] = aqMem;
			time[i] = aqTime;
			error[i] = 0d;
			maxDist[i] = new Double[Queries.length];
		}

		// Measure the error returned by each of the algorithm
		for (int i = 0; i < type.length; i++)
		{
			for (int j = 0; j < Queries.length; j++)
			{
				
				maxDist[i][j] = 0d;
				if (type[i] != QUERY_TYPE.RANDOM_PROJECTION)
				{
					for (int l = 0; l < result[i][j].length; l++)
					{
						if (result[i][j][l].value > maxDist[i][j]) maxDist[i][j] = result[i][j][l].value;
					}
				}
				else
				{
					for (int l = 0; l < result[i][j].length; l++)
					{
						Double dist = DataBase[result[i][j][l].index].Distance(Queries[j]);
						if (dist > maxDist[i][j]) maxDist[i][j] = dist;
					}
				}
			}
		}
		
		for (int i = 0; i < type.length; i++)
		{
			for (int j = 0; j < Queries.length; j++)
			{
				error[i] += maxDist[i][j] - maxDist[0][j];
			}
		}
		
		int dim = dimension;
		if (dimension == -1) dim = FeatureIDs.size();
		try
		{
			PrintStream ps = new PrintStream(new FileOutputStream("stat_" + pointLimit + "_" + dimension + ".csv"));
			for(int i = 0; i < type.length; i++)
			{
				if (k == 1 || type[i] != QUERY_TYPE.RP_TREE)
				{
					StringBuilder sb = new StringBuilder();
					sb.append(time[i] + "," + mem[i] + "," + (error[i] / (double)dim));
					ps.println(sb.toString());
				}
			}
			ps.close();
		}
		catch (Exception ex)
		{
			System.out.println("Error in opening file");
			ex.printStackTrace();
		}
		/*
		try
		{
			PrintStream ps = new PrintStream(new FileOutputStream("ErrorResult_" + pointLimit + "_" + dimension + ".csv"));
			for(int i = 0; i < result[0].length; i++)
			{
				StringBuilder sb = new StringBuilder("0");
				for (int j = 1; j < type.length; j++)
				{
					if (type[j] == QUERY_TYPE.RANDOM_PROJECTION) 
					{
						sb.append("," + (DataBase[result[j][i][0].index].Distance(Queries[i]) - result[0][i][0].value));
					}
					else sb.append("," + (result[j][i][0].value - result[0][i][0].value));
				}
				ps.println(sb.toString());
			}
			ps.close();
		}
		catch (Exception ex)
		{
			System.out.println("Error in opening file");
			ex.printStackTrace();
		}
		*/
	}

	// Run all query against certain type of method
	public NN.SortingElement[][] runAllQuery(QUERY_TYPE type)
	{
		NN.SortingElement[][] allResult;
		
		int len = Queries.length;
		allResult = new NN.SortingElement[len][];
		Double[] execTimes = new Double[len];
		Double minTime = Double.MAX_VALUE, maxTime = 0d, accTime = 0d, accMem = 0d;

		// Only need to initialize once for RP Tree and Random Projection Matrix
		if (type == QUERY_TYPE.RP_TREE)
		{
			if (NNRPTree == null) 
			{
				System.gc();
				RPTreeMemory = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
				NNRPTree = new RPTree(DataBase, dimension, dict);
				RPTreeMemory = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory() - RPTreeMemory;
				//System.out.println(RPTreeMemory);
			}
		}
		if (type == QUERY_TYPE.RANDOM_PROJECTION)
		{
			if (projectedDB == null)
			{
				System.gc();
				RPMemory = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
				RPEngine = new RandomProjection(dimension, projectionDimension, dict_2);
				projectedDB = RPEngine.doProjection(DataBase);
				projectedQ = RPEngine.doProjection(Queries);
				RPMemory = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory() - RPMemory;
				//System.out.println(RPTreeMemory);
			}
		}
		
		
		for (int i = 0; i < len; i++)
		{
			System.gc();

			if (type == QUERY_TYPE.RANDOM_PROJECTION) allResult[i] = runQuery(projectedQ[i], true, type);
			else allResult[i] = runQuery(Queries[i], true, type);
			execTimes[i] = new Long(qTime).doubleValue();
			accMem += qMem;

			if (execTimes[i] < minTime) minTime = execTimes[i];
			if (execTimes[i] > maxTime) maxTime = execTimes[i];
			accTime += execTimes[i];
		}

		double meanTime = accTime / len;
		if (meanTime == 0d) meanTime = 0.0000000001;
		double deviation = 0;
		for (int i = 0; i < len; i++)
		{
			deviation += Math.pow(execTimes[i] - meanTime, 2);
		}
		double stdDev = Math.sqrt(deviation / len);
		System.out.println("Min Time : " + minTime + "ms\nMax Time : " + maxTime + "ms\n" + "Mean Time : " + meanTime + "ms\n" + "Std. Dev : " + stdDev + "ms");
		aqTime = meanTime;
		
		if (type == QUERY_TYPE.RP_TREE) 
		{
			aqMem = (((double)accMem / len) + RPTreeMemory);
			System.out.println("Mean Mem : " + aqMem + " bytes");
		}	
		else if (type == QUERY_TYPE.RANDOM_PROJECTION) 
		{
			aqMem = (((double)accMem / len) + RPMemory);
			System.out.println("Mean Mem : " + aqMem + " bytes");
		}
		else
		{
			aqMem = accMem / len;
			System.out.println("Mean Mem : " + aqMem + " bytes");
		}
		if (aqMem == 0d) aqMem = 0.0000000001;
		return allResult;
	}

	// For the purpose of testing, create a random dense vector
	public Point CreateRandomVectors(int size, int dimension)
	{
		Random randomizer = new Random();
		DataBase = new Point[size];
		rawData = new ArrayList<XMLData>();
		for (int i = 0; i < size; i++)
		{
			Double feature[] = new Double[dimension];
			for (int j = 0; j < dimension; j++)
			{
				feature[j] = (randomizer.nextGaussian() + 1.0) * 10;
			}
			DataBase[i] = new Point(i, feature, Point.DIST_TYPE.EUCLIDEAN);
			rawData.add(new XMLData("Vector-" + (i+1), "", ""));
		}

		Double feature[] = new Double[dimension];
		for (int j = 0; j < dimension; j++)
		{
			feature[j] = (randomizer.nextGaussian() + 1.0) * 10;
		}

		this.dimension = dimension;
		return new Point(-1, feature, Point.DIST_TYPE.EUCLIDEAN);
	}

	// Run a single query "q" against the database with the selected method.
	// Side effects: qTime / qMem receive the measured wall-clock time (ms) and
	// heap delta (bytes) of the query; they are read afterwards by runAllQuery().
	// NOTE(review): memory deltas come from totalMemory()-freeMemory() snapshots,
	// so they are approximate and can be perturbed by GC activity.
	public NN.SortingElement[] runQuery(Point q, boolean printResult, QUERY_TYPE type)
	{
		NN.SortingElement[] ref = null;
		long time = System.currentTimeMillis();
		long curMem = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
		int len = 0;
		String fileName = Configuration.ResultPath + "/result_" + q.ID() + "_" + pointLimit + "_" + dimension;
		
/*
		// Project Data and compare result
		System.out.println("Initializing Projection Matrix");
		RandomProjection RPEngine = new RandomProjection(dimension, projectionDimension);
		System.out.println("Projecting Data...");
		Point[] projectedDB = RPEngine.doProjection(DataBase);
		Point projectedQ = RPEngine.doProjection(q);
		System.out.println("Finish projecting data");
*/
		// Projection Matrix
/*
		try
		{
			PrintStream ps = new PrintStream(new FileOutputStream("debugMatrix.csv"));
			ps.println(RPEngine);
			ps.close();
			//ps.println("=========================");
			PrintStream ps2 = new PrintStream(new FileOutputStream("debugDB.csv"));			
			for (int i = 0; i < DataBase.length; i++) ps2.println(DataBase[i]);
			ps2.close();
			//ps.println("=========================");
			PrintStream ps3 = new PrintStream(new FileOutputStream("debugProjected.csv"));
			for (int i = 0; i < projectedDB.length; i++) ps3.println(projectedDB[i]);			
			ps3.close();
			
			PrintStream ps4 = new PrintStream(new FileOutputStream("debugQuery.csv"));
			ps4.println(q);
			ps4.close();

			PrintStream ps5 = new PrintStream(new FileOutputStream("debugProjectedQuery.csv"));
			ps5.println(projectedQ);
			ps5.close();
		} catch(Exception ex)
		{
			System.out.println(ex.getMessage());
		}
*/
		switch (type)
		{
			// Brute force: linear scan over the whole database
			case EXHAUSTIVE :
				curMem = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
				Brute NNBrute = new Brute(DataBase, k);
				time = System.currentTimeMillis();
				ref = NNBrute.Query(q);
				qTime = System.currentTimeMillis() - time;
				qMem = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory() - curMem;
				System.out.println("Brute Query Time : " + qTime);
				if (printResult) printResults(ref, fileName + "_brute.csv", q);
				else
				{
					len = ref.length;
					for (int i = 0; i < len; i++)
					{
						System.out.println("rank " + (i + 1) + ": "+ rawData.get(ref[i].index).docID + ", Score : " + (ref[i].value));
					}
				}		
				break;	

	
			// Exhaustive on projected data
			// (q is expected to already be projected; see runAllQuery)
			case RANDOM_PROJECTION :
				curMem = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
				Brute NNBruteProjected = new Brute(projectedDB, k);
				time = System.currentTimeMillis();
				ref = NNBruteProjected.Query(q);
				qTime = System.currentTimeMillis() - time;
				// One-time projection cost (RPMemory) is folded into this query's memory
				qMem = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory() - curMem + RPMemory;
				System.out.println("Brute Projected Query Time : " + qTime);
				if (printResult) printResults(ref, fileName + "_pBrute.csv", q);
				else
				{
					len = ref.length;
					for (int i = 0; i < len; i++)
					{
						System.out.println("rank " + (i + 1) + ": "+ rawData.get(ref[i].index).docID + ", Score : " + (ref[i].value));
					}
				}
				break;
	

			// Vantage-point based search over the raw database
			case SIMPLE_VP :
				curMem = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
				simpleVP NNsimpleVP = new simpleVP(DataBase, k);
				time = System.currentTimeMillis();
				ref = NNsimpleVP.Query(q);
				qTime = System.currentTimeMillis() - time;
				qMem = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory() - curMem;
				System.out.println("Simple VP Query Time : " + qTime);
				if (printResult) printResults(ref, fileName + "_simpleVP.csv", q);
				else
				{
					len = ref.length;
					for (int i = 0; i < len; i++)
						System.out.println("rank " + (i + 1) + ": "+ rawData.get(ref[i].index).docID + ", Score : " + (ref[i].value));
				}
				break;
	

	/*
			// VP Tree
			System.out.println("VP Tree : ");
			time = System.currentTimeMillis();
			VPTree NNVPTree = new VPTree(projectedDB);
			System.out.println("VP Construction Time : " + (System.currentTimeMillis() - time));
			time = System.currentTimeMillis();
			ref = NNVPTree.Query(projectedQ);
			System.out.println("VP Query Time : " + (System.currentTimeMillis() - time));
			System.out.println("rank " + (1) + ": "+ rawData.get(ref[0].index).docID + ", Score : " + ref[0].value);
	*/

			case RP_TREE :
			// RP Tree (built once in runAllQuery; only queried here)
				curMem = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
				time = System.currentTimeMillis();
//				System.out.println("RP Construction Time : " + (System.currentTimeMillis() - time));
				ref = NNRPTree.Query(q);
				qTime = System.currentTimeMillis() - time;
				// One-time tree cost (RPTreeMemory) is folded into this query's memory
				qMem = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory() - curMem + RPTreeMemory;
				System.out.println("RP Tree Time : " + qTime);
/*				System.out.println("RP Query Time : " + (System.currentTimeMillis() - time));
				System.out.println("rank " + (1) + ": "+ rawData.get(ref[0].index).docID + ", Score : " + (ref[0].value));
*/				break;
		}
		return ref;
	}

	// Now conditioned for the sparse vector only, which is limited to non full text dimension
	// It is used to feed the dense vector to LSH and RBC
	// Now conditioned for the sparse vector only, which is limited to non full text dimension
	// It is used to feed the dense vector to LSH and RBC.
	// Writes DataBase to DBName and Queries to QName, one space-separated dense
	// row per point. Fixes the old query path, which looped up to "dimension"
	// instead of the computed limit and therefore printed nothing useful in
	// full-text mode (dimension == -1).
	public void printDenseData(String DBName, String QName)
	{
		printDensePoints(DataBase, DBName);
		printDensePoints(Queries, QName);
	}

	// Shared writer for one array of points (previously duplicated for DataBase
	// and Queries). Column j holds the value of feature dict[j] when a fixed
	// dimension is used, or feature (j + 1) in full-text mode; absent features
	// are written as 0.
	private void printDensePoints(Point[] points, String fileName)
	{
		PrintStream ps = null;
		try
		{
			ps = new PrintStream(new FileOutputStream(fileName));
			int limit = dimension;
			if (dimension == -1) limit = FeatureIDs.size() + 1;
			for (int i = 0; i < points.length; i++)
			{
				StringBuilder sb = new StringBuilder();
				Map<Object, Double> feature = points[i].FeatureSparse();
				for (int j = 0; j < limit; j++)
				{
					if (j > 0) sb.append(" ");
					Integer key = (j + 1);
					if (dimension > 0) key = dict[j];
					if (feature.containsKey(key)) sb.append(feature.get(key));
					else sb.append("0");
				}
				ps.println(sb.toString());
			}
		} catch(Exception ex)
		{
			System.out.println(ex.getMessage());
		}
		finally
		{
			// Close unconditionally; the old code leaked the stream on a write error
			if (ps != null) ps.close();
		}
	}

	// Write a result file: one "reportedDist,realDist,pointID" line per returned
	// neighbour, followed by a summary line "maxReported,maxReal,difference".
	// Real distances are recomputed against the unprojected query so projected
	// results stay comparable.
	public void printResults(NN.SortingElement[] ref, String fileName, Point q)
	{
		PrintStream ps = null;
		try
		{
			ps = new PrintStream(new FileOutputStream(fileName));
			int len = ref.length;
			Double maxResult = 0d, maxReal = 0d;

			// Locate the query in Queries so real distances can be recomputed.
			// NOTE(review): if q is not found, qID silently stays 0 and distances
			// are taken against Queries[0] — confirm callers always pass one of
			// the loaded queries.
			int qID = 0;
			for (int i = 0; i < Queries.length; i++)
			{
				if (q.ID() == Queries[i].ID())
				{
					qID = i;
					break;
				}
			}

			for (int i = 0; i < len; i++)
			{
				Double realDist = DataBase[ref[i].index].Distance(Queries[qID]);
				ps.println(ref[i].value + "," + realDist + "," + DataBase[ref[i].index].ID());
				if (ref[i].value > maxResult) maxResult = ref[i].value;
				if (realDist > maxReal) maxReal = realDist;
			}
			ps.println(maxResult + "," + maxReal + "," + (maxReal - maxResult));
		} catch(Exception ex)
		{
			ex.printStackTrace();
		}
		finally
		{
			// Previously ps.close() was skipped whenever an exception occurred
			if (ps != null) ps.close();
		}
	}

	// For the purpose of testing, print the entire feature and it's value
	private void printFeatures(Map<Integer, Integer> feature, String fileName)
	{
		try
		{
			PrintStream ps = new PrintStream(new FileOutputStream(fileName));
			for (Map.Entry<Integer, Integer> e : feature.entrySet()) 
				ps.println(e.getKey() + "\t" + e.getValue());
			ps.close();
		} catch(Exception ex)
		{
			System.out.println(ex.getMessage());
		}
	}

	// For the purpose of comparing results with RBC and LSH, read the same dense vectors stored in a file
	public void testPrinted(String DBName, String QName)
	{
		DataBase = new Point[100];
		Queries = new Point[4];
		int counter = 0;
		try
		{
			File f = new File(DBName);
			FileInputStream fin = new FileInputStream(f);
			InputStreamReader sr = new InputStreamReader(fin);
			BufferedReader is = new BufferedReader(sr);

			String line = null;
			counter = 0;
			while ((line = is.readLine()) != null)
			{
				HashMap<Object, Double> feature = new HashMap<Object, Double>();
				String split[] = line.split(" ");
				for (int i = 0; i < split.length; i++)
				{
					if (!split[i].isEmpty()) 
					{
						Double val = Double.parseDouble(split[i]);
						if (val != 0d) feature.put((i+1), val);
					}
				}
				DataBase[counter] = new Point(counter, feature, Point.DIST_TYPE.EUCLIDEAN);
				counter++;
			}
			//f.close();
		} catch (Exception ex)
		{
			ex.printStackTrace();
			return;
		}	
		System.out.println(counter);
		System.out.println("Finish Reading DB");
		try
		{
			File f = new File(QName);
			FileInputStream fin = new FileInputStream(f);
			InputStreamReader sr = new InputStreamReader(fin);
			BufferedReader is = new BufferedReader(sr);

			String line = null;
			counter = 0;
			while ((line = is.readLine()) != null)
			{
				HashMap<Object, Double> feature = new HashMap<Object, Double>();
				String split[] = line.split(" ");
				for (int i = 0; i < split.length; i++)
				{
					if (!split[i].isEmpty()) 
					{
						Double val = Double.parseDouble(split[i]);
						if (val != 0d) feature.put((i+1), val);
					}
				}
				Queries[counter] = new Point(counter, feature, Point.DIST_TYPE.EUCLIDEAN);
				counter++;
			}
			//f.close();
		} catch (Exception ex)
		{
			ex.printStackTrace();
		}	
		System.out.println("Finish Reading Queries");
		System.out.println(counter);
	}

	// Entry point: load the configuration, build the engine, read the database
	// and the processed queries, then run either the full benchmark suite or the
	// individually configured query types.
	public static void main(String args[])
	{
		Configuration.loadConfiguration("config.xml");
		Engine runEngine = new Engine(Configuration.dimension, Configuration.numberOfPoints, Configuration.projectionDimension, Configuration.k);

		runEngine.readDB(Configuration.DBPath);
		System.out.println("Finish Reading DataBase");
		runEngine.readQuery(Configuration.ProcessedQueryPath);
		System.out.println("Finish Reading Queries");

		if (Configuration.runAllQuery)
		{
			runEngine.runAllTypeQuery();
		}
		else
		{
			for (QUERY_TYPE queryType : Configuration.individualQueries)
			{
				runEngine.runAllQuery(queryType);
			}
		}
	}
}
