package core;

import static view.Constants.BUFFER_SIZE;
import static view.Constants.MAPPING;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;

import controller.Controller;

/**
 * @author Mostafa Mahmod Mahmod Eweda
 * <pre>mostafa_eweda17@yahoo.com</pre>
 * @since JDK 1.6
 * @version 1.0
 * 
 * Analyzes a source file: gathers byte-frequency statistics, builds the
 * Huffman tree, and constructs the canonical code table.
 * 
 * @see CompressedChar
 */
public class FileAnalysis
{

	/**
	 * Direct-access frequency table: freqs[b + MAPPING] counts how often the
	 * byte value b occurred in the source file
	 */
	private int[] freqs;

	/**
	 * The table that provides hashed access --> one instruction access to the
	 * compressed chars, indexed by the same mapped byte value as {@link #freqs}
	 */
	private CompressedChar[] chars;

	/**
	 * The source file the analysis is done on
	 */
	private File file;

	/**
	 * The number of distinct byte values gathered by the statistics;
	 * this also defines the number of different character types in the file
	 */
	private int charsLength;

	/**
	 * Creates a File Analyzer on a given path for a file
	 * @param path the path of the source file
	 */
	public FileAnalysis(String path)
	{
		this(new File(path));
	}

	/**
	 * Creates a File Analyzer on the given file
	 * @param file the source file
	 */
	public FileAnalysis(File file)
	{
		freqs = new int[256];
		chars = new CompressedChar[256];
		this.file = file;
		charsLength = 0;
	}

	/**
	 * Collects and registers statistics on the current working file.
	 * I/O errors are reported on stderr; the input stream is always closed.
	 */
	public void collectStatistics()
	{
		BufferedInputStream in = null;
		try {
			in = new BufferedInputStream(new FileInputStream(file));
			byte[] buff = new byte[BUFFER_SIZE];
			int numRead;
			while ((numRead = in.read(buff)) != -1)
				onStatistics(buff, numRead);
		} catch (FileNotFoundException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			// FIX: close in finally so the stream is released even when read()
			// throws (the original leaked the stream on any IOException)
			if (in != null) {
				try {
					in.close();
				} catch (IOException ignored) {
					// nothing sensible to do about a failed close
				}
			}
		}
	}

	/**
	 * Accumulates the given buffer's bytes into the frequency table
	 * @param buff the byte array read from the file
	 * @param len the number of valid bytes in the buffer
	 */
	private void onStatistics(byte[] buff, int len)
	{
		for (int i = 0; i < len; i++)
			freqs[buff[i] + MAPPING]++;
	}

	/**
	 * Get the statistics data generated by the analyzer
	 * @return Node[] containing one node per distinct byte value, for the
	 *         tree generation
	 */
	public Node[] constructStatistics()
	{
		ArrayList<Node> nodes = new ArrayList<Node>();
		for (int i = 0; i < freqs.length; i++)
			if (freqs[i] > 0)
			{
				charsLength++;
				nodes.add(new Node((byte) (i), freqs[i]));
			}
		return nodes.toArray(new Node[nodes.size()]);
	}

	/**
	 * Creates the Huffman tree with the given node data.
	 * Precondition (unchanged from the original): nodes.length &gt;= 2,
	 * i.e. the file contains at least two distinct byte values.
	 * @param nodes the data generated by statistical methods;
	 * left edge = 0, right edge = 1
	 * @return the root of the tree
	 */
	public Node constructTree(Node[] nodes)
	{
		HeapSorter sorter = new HeapSorter(nodes);
		sorter.buildheap();

		/*
		 * Iterate to find the two smallest-frequency nodes and merge them into
		 * a parent node. The heap construction and heapification keep the
		 * smallest values at the front of the array.
		 */
		Node smallest, secondSmallest;
		for (int i = 0, n = nodes.length - 2; i < n; i++)
		{
			smallest = nodes[i];
			secondSmallest = nodes[i + 1];
			if (nodes[i + 2].compareTo(nodes[i + 1]) < 0) {
				secondSmallest = nodes[i + 2];
				nodes[i + 2] = nodes[i + 1];
			}
			nodes[i + 1] = new Node((byte) 0, smallest.getFrequencies()
					+ secondSmallest.getFrequencies(), smallest, secondSmallest);

			// heapify the newly created node down the array
			sorter.otherHeapify(i + 1);
			nodes[i] = null;
		}
		Node root = new Node((byte) 0, 0, nodes[nodes.length - 2],
				nodes[nodes.length - 1]);

		// traverse the tree to find the codes of the compressed chars
		registerCompChars(root);
		return root;
	}

	/**
	 * Constructs the canonical code table for the compressed
	 * chars to be written minimally on the compressed file
	 * @return the table represented as a CompressedChar array
	 */
	public CompressedChar[] constructTable()
	{
		ArrayList<CompressedChar> compressedChars = new ArrayList<CompressedChar>();
		constructTable(Controller.getInstance().getRoot(), compressedChars);
		Collections.sort(compressedChars);
		CompressedChar[] compChars = compressedChars
				.toArray(new CompressedChar[compressedChars.size()]);
		long res = -1;
		CompressedChar ch;
		int len = compChars[0].length;
		for (int i = 0; i < compChars.length; i++) {
			ch = compChars[i];
			res += 1;
			if (ch.length < len)
			{
				/* Note: the unsigned shift operator (>>>) is used instead of
				 * the signed one (>>) because the signed operator shifts right
				 * arithmetically and would smear the sign bit. */
				res = res >>> (len - ch.length);
			}
			len = ch.length;
			compChars[i].sequence = res;
		}
		return compChars;
	}

	/**
	 * Iterates recursively on the leaf nodes of the Huffman tree
	 * to collect the needed data for the table construction
	 * @param temp the root of the Huffman tree
	 * @param compressedChars output list the leaves' compressed chars are
	 *        appended to, in left-to-right leaf order
	 */
	private void constructTable(Node temp,
			ArrayList<CompressedChar> compressedChars)
	{
		if (temp.isExternal()) {
			// unsigned byte value of the leaf (same index used by registerCompChars)
			int c = temp.character() & 0xFF;
			compressedChars.add(chars[c]);
			return;
		}
		// traverse left then right subtree
		constructTable(temp.left(), compressedChars);
		constructTable(temp.right(), compressedChars);
	}

	/**
	 * Registers the code of every leaf of the given Huffman tree
	 * into the {@link #chars} table
	 * @param root the root of the Huffman tree
	 */
	public void registerCompChars(Node root)
	{
		registerCompChars(root, "", 0);
	}

	/**
	 * Recursive worker: walks the tree accumulating the 0/1 path and, on a
	 * leaf, stores a CompressedChar holding the path's placeholder value and
	 * the code length (tree depth of the leaf)
	 * @param node current subtree root
	 * @param path the 0/1 edge labels from the root down to this node
	 * @param level the depth of this node, i.e. the code length at a leaf
	 */
	private void registerCompChars(Node node, String path,
			int level)
	{
		if (node.isExternal()) {
			// unsigned byte value of the leaf
			int c = node.character() & 0xFF;
			// FIX: accumulate the decimal value of the 0/1 path manually
			// instead of Long.parseLong(path): parseLong throws
			// NumberFormatException on an empty path or on codes longer than
			// 18 bits. The value is only a placeholder anyway --
			// constructTable() later overwrites it with the canonical sequence.
			long seq = 0;
			for (int i = 0; i < path.length(); i++)
				seq = seq * 10 + (path.charAt(i) - '0');
			chars[c] = new CompressedChar(seq, (byte) level, (byte) (c));
			return;
		}
		// traverse left then right subtree
		registerCompChars(node.left(), path + "0", level + 1);
		registerCompChars(node.right(), path + "1", level + 1);
	}

	/**
	 * @return chars
	 * @see #chars
	 */
	public CompressedChar[] getChars()
	{
		return chars;
	}

	/**
	 * Sets the current statistical file
	 * @param dataSource the source file
	 */
	public void setFile(File dataSource)
	{
		this.file = dataSource;
	}
}
