package com.iiitb.wtp;

import java.io.IOException;
import java.net.URL;
import java.net.URLConnection;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;

import com.iiitb.model.SeedHandler;

public class Parser {

	String pageSource = null;
	// Shared link-analysis state: all Parser instances feed the same web
	// graph, so HITS/PageRank scores accumulate over every crawled page.
	static HITS hits = new HITS();
	static WebGraph wg = new WebGraph();
	static PageRank pg = new PageRank();
	int count = 0;

	/**
	 * Extracts the hyperlinks of a crawled page so they can be added to the
	 * crawl frontier, and updates the link-analysis scores.
	 *
	 * @param contentOfPage raw HTML of the page that was fetched
	 * @param url           the URL the page was fetched from
	 * @param tag           classification tag to associate with each link
	 * @param urlList       existing frontier map (currently unused here;
	 *                      kept for interface compatibility)
	 * @return map of newly discovered link -&gt; tag
	 * @throws SQLException if persisting crawl data fails
	 * @throws IOException  declared for interface compatibility
	 */
	public Map<String, String> parse(String contentOfPage, String url,
			String tag, Map<String, String> urlList) throws SQLException,
			IOException {
		return hyperlinksExtractor(url, contentOfPage, tag, urlList);
	}

	/**
	 * Classifies a page as describing a human or not, based on word
	 * frequencies in its content.
	 *
	 * @param contentOfPage raw HTML/text of the page
	 * @return {@code "human"} or {@code "Not human"}
	 * @throws SQLException declared for interface compatibility
	 * @throws IOException  declared for interface compatibility
	 */
	public String parseTag(String contentOfPage) throws SQLException,
			IOException {
		boolean human = humanOrNotExtractor(contentOfPage);
		System.out.println(" human " + human);
		System.out.println("I am in parser!!");
		return human ? "human" : "Not human";
	}

	/**
	 * Scans the page content for {@code href="http://..."} links, records
	 * each valid link in the returned map, the shared web graph, and the
	 * database, then recomputes and persists HITS and PageRank scores.
	 *
	 * <p>Processing is best-effort per link: a single malformed URL or DB
	 * failure skips that link instead of aborting the whole page (the
	 * original broad try/catch around the loop dropped all remaining links
	 * and the entire scoring pass on the first error).
	 *
	 * @param url           source page URL (graph edge origin)
	 * @param contentOfPage raw HTML to scan
	 * @param tag           classification tag stored with each link
	 * @param urlLink       frontier map (currently unused; kept for
	 *                      interface compatibility)
	 * @return map of discovered link -&gt; tag
	 * @throws SQLException if constructing the DB handler fails
	 * @throws IOException  declared for interface compatibility
	 */
	private Map<String, String> hyperlinksExtractor(String url,
			String contentOfPage, String tag, Map<String, String> urlLink)
			throws SQLException, IOException {
		SeedHandler dbConn = new SeedHandler();
		Map<String, String> map = new HashMap<String, String>();
		System.out.println("extracting hyperlinks!");

		// Each split segment (after the first) starts with the link target,
		// terminated by the closing quote of the href attribute.
		String[] ref = contentOfPage.split("href=\"http://");

		for (int i = 1; i < ref.length; i++) {
			String link = "http://" + ref[i].split("\"")[0];
			try {
				// Syntactic validation only. NOTE: the original also called
				// openConnection(), but that never actually connects, so it
				// validated nothing; the unused URLConnection was removed.
				new URL(link);

				map.put(link, tag);
				wg.addLink(url, link, 1.0);
				dbConn.insertSearchedData(link, tag);
			} catch (Exception e) {
				// Best-effort: one bad link (malformed URL, DB hiccup) must
				// not abort extraction of the remaining links on this page.
				System.err.println("Skipping link " + link + ": " + e);
			}
		}

		// Recompute link-analysis scores over the updated graph and persist
		// them. Guarded separately so a scoring/persistence failure still
		// returns the links gathered above.
		try {
			hits.getHITS(wg);
			pg.getPageRank(wg);
			for (int j = 0; j < wg.numNodes(); j++) {
				dbConn.insertHITSData(wg.IdentifyerToURL(j),
						hits.authorityScore(j), hits.hubScore(j), tag);
				dbConn.insertPGData(wg.IdentifyerToURL(j), pg.scores(j), tag);
			}
		} catch (Exception e) {
			// Best-effort: scoring failures must not kill the crawler.
			e.printStackTrace();
		}
		return map;
	}

	/**
	 * Delegates the human/not-human decision to the word-frequency
	 * classifier.
	 *
	 * @param contentOfPage raw HTML/text of the page
	 * @return {@code true} if the classifier deems the page human-related
	 */
	private boolean humanOrNotExtractor(String contentOfPage) {
		WordFreqCounter wfc = new WordFreqCounter();
		return wfc.parse(contentOfPage);
	}

}
