package org.tabularium.text.html;

import org.tabularium.net.downloader.Downloader;
import org.tabularium.text.analysis.StandardAnalyzer;
import org.tabularium.text.html.HypertextUnit;

import sun.misc.Resource;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.*;

/**
 * Simplifies a HypertextUnit-based representation of a document according to a
 * list of allowable types.
 * 
 * @author Fabio Gasparetti
 * @version 1.0, 12/03/07
 */
public class HypertextUnitTreeSimplifier {

	/**
	 * Given the root of a HypertextUnit-based tree, simplifies the tree by
	 * removing the units whose type is not included in the allowable list or
	 * is included in the forbidden list. The original tree is left unchanged:
	 * every kept unit is shallow-cloned, so the content list is new but the
	 * other fields are shared between the original and the generated trees. A
	 * clone of the root of the input tree is always returned (the root itself
	 * is never filtered out).
	 *
	 * @param root            root of the tree to simplify
	 * @param allowableTypes  unit types to keep; {@code null} keeps every type
	 * @param forbiddenTypes  unit types to drop; {@code null} drops none
	 * @param keepDescendants true if the subtrees of not-allowed nodes are not
	 *                        dismissed but re-attached to the nearest kept
	 *                        ancestor
	 * @return the filtered shallow copy of the tree rooted at {@code root}
	 */
	public static HypertextUnit filter(HypertextUnit root,
			Set allowableTypes, Set forbiddenTypes, boolean keepDescendants) {
		return recursiveFilter(root, allowableTypes, forbiddenTypes,
				keepDescendants);
	}

	/**
	 * Recursive worker for {@link #filter(HypertextUnit, Set, Set, boolean)}:
	 * clones {@code unit}, then fills the clone's fresh content list with the
	 * filtered clones of the accepted children. Unlike the previous
	 * implementation, it never mutates {@code unit} or its descendants
	 * (no re-parenting of original nodes, no insertion into the original
	 * child lists), as promised by the public contract.
	 */
	protected static HypertextUnit recursiveFilter(HypertextUnit unit,
			Set allowableTypes, Set forbiddenTypes, boolean keepDescendants) {
		// shallow clone makes a new children list
		HypertextUnit newUnit;
		try {
			newUnit = (HypertextUnit) unit.clone();
		} catch (Exception e) {
			// clone() is expected never to fail here; fail loudly instead of
			// continuing with a null unit (which previously caused an NPE on
			// the very next statement).
			throw new IllegalStateException("HypertextUnit clone failed", e);
		}
		newUnit.content.clear();

		// Work on a private copy of the child list so that promoting the
		// descendants of a rejected node (keepDescendants) does not mutate
		// the original tree.
		List pending = new ArrayList(unit.getChildUnits());
		for (int i = 0; i < pending.size(); i++) {
			HypertextUnit u = (HypertextUnit) pending.get(i);
			if (!isRejected(u, allowableTypes, forbiddenTypes)) {
				HypertextUnit filtered = recursiveFilter(u, allowableTypes,
						forbiddenTypes, keepDescendants);
				// re-parent the clone, not the original node
				filtered.parent = newUnit;
				newUnit.content.add(filtered);
			} else if (keepDescendants) {
				// promote the rejected node's subtrees: they are appended to
				// the work list and examined later in this same loop
				pending.addAll(u.content);
			}
			// otherwise the whole subtree rooted at u is dismissed
		}
		return newUnit;
	}

	/**
	 * Removes, in place, all the nodes (and the references to them in the
	 * child lists) whose type is not in the allowable set or is in the
	 * forbidden set. Note: the initial id numbering of nodes is kept in the
	 * result list.
	 *
	 * @param tree           flat list of {@code HypertextUnit}s; modified in
	 *                       place
	 * @param allowableTypes unit types to keep; {@code null} keeps every type
	 * @param forbiddenTypes unit types to drop; {@code null} drops none
	 * @return {@code tree} itself, after filtering
	 */
	public static List filter(List tree, Set allowableTypes,
			Set forbiddenTypes) {
		// drop the rejected nodes from the flat node list
		for (Iterator i = tree.iterator(); i.hasNext();) {
			HypertextUnit current = (HypertextUnit) i.next();
			if (isRejected(current, allowableTypes, forbiddenTypes)) {
				i.remove();
			}
		}
		// each surviving node still references one or more children; purge
		// the rejected ones from every child list as well
		for (Iterator i = tree.iterator(); i.hasNext();) {
			HypertextUnit current = (HypertextUnit) i.next();
			for (Iterator y = current.getChildUnits().iterator(); y.hasNext();) {
				HypertextUnit child = (HypertextUnit) y.next();
				if (isRejected(child, allowableTypes, forbiddenTypes)) {
					y.remove();
				}
			}
		}
		return tree;
	}

	/**
	 * True when the unit's type fails the allow-list or matches the
	 * deny-list. A {@code null} set means "no constraint". Centralizes the
	 * predicate that was previously duplicated in every filtering loop.
	 */
	private static boolean isRejected(HypertextUnit unit, Set allowableTypes,
			Set forbiddenTypes) {
		return (allowableTypes != null && !allowableTypes.contains(unit
				.getType()))
				|| (forbiddenTypes != null && forbiddenTypes.contains(unit
						.getType()));
	}

	/**
	 * Manual smoke test: downloads a page, builds its HypertextUnit tree,
	 * runs the text analysis and prints the tree filtered down to DIV and
	 * TABLE units.
	 */
	public static void main(String[] args) {
		try {
			org.tabularium.net.downloader.Resource res = Downloader
					.retrieve("http://www.repubblica.it");
			// NOTE(review): decodes with the platform default charset; the
			// page's declared encoding is ignored — confirm this is intended.
			String doc = new String(res.getObject());

			HypertextUnitBuilder hu = new HtmlParserHypertextUnitBuilder();
			// NOTE(review): the base URL below does not match the URL
			// actually downloaded above — verify which one is intended.
			HypertextUnit root = hu.build("http://www.cnn.com/", doc,
					new DefaultHypertextUnitFactory());
			StandardAnalyzer an = new StandardAnalyzer();
			an.enableNumberFilter(false);
			an.enablePorterStemmer(false);
			an.enableStopList(false);
			root.textAnalysis(an);

			Set allowableTypes = new HashSet();
			allowableTypes.add("DIV");
			allowableTypes.add("TABLE");
			System.out.println("rootS1:"
					+ HypertextUnitTreeSimplifier.filter(root, allowableTypes,
							null, true));
		} catch (Exception ex) {
			ex.printStackTrace();
		}
	}
}
