package com.creditrank.spamdetector;

import java.util.*;
import java.util.zip.*;
import java.net.*;
import java.io.*;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
import java.net.MalformedURLException;
import java.net.URL;
import org.htmlparser.NodeFilter;
import org.htmlparser.Parser;
import org.htmlparser.filters.NodeClassFilter;
import org.htmlparser.tags.LinkTag;
import org.htmlparser.util.NodeList;
import org.htmlparser.util.ParserException;
import java.sql.*;


public class webcrawler {

	// Character count of the page content currently being processed.
	// NOTE: set from Content.length() in HeuristicDeterminer, so despite the
	// name this is characters, not words.
	int TotalWords;
	double AnchorTextLength;

	public boolean AllowIndex;

	public boolean Follow;
	// Size of the raw page source and of its gzip-compressed form; both are
	// passed to SpamDetector.HieuristicSix as a compressibility signal.
	long UncompressedFileSize = 0;
	long CompressedFileSize = 0;
	// IDs of the URLs written to the training ARFF file, in write order.
	public ArrayList<String> TrainedUrls;
	// Maps a host name to its label, loaded from Urls.txt.
	public Hashtable<String, String> DomainLabels = new Hashtable<String, String>();
	public ArrayList<String> TitleWords;
	public ArrayList<String> AnchorWords;
	public ArrayList<String> CommonWords;
	public FileWriter fileWriter;
	public FileWriter ArffFileWriter;
	public PrintWriter ArffPrintWriter;
	public PrintWriter pageWriter;

	public SpamDetector spamDetect;

	/**
	 * Creates the crawler, its SpamDetector, and the list that records which
	 * URL ids end up in the training set.
	 */
	public webcrawler() {

		spamDetect = new SpamDetector();
		TrainedUrls = new ArrayList<String>();
		// Never prompt the user during URL access.
		URLConnection.setDefaultAllowUserInteraction(false);

		System.out.println("Finished Initializing the Queues");

	}

	/** Prints a status/progress message to stdout. */
	void setStatus(String status) {

		System.out.println(status);
	}

	/**
	 * Main processing loop: loads the common-word list and the host labels,
	 * then reads every stored page from the "urls" table, computes the spam
	 * heuristics for each page, and finally writes the collected values to
	 * the training ARFF file.
	 */
	public void BeginProcessing() {

		CommonWords = new ArrayList<String>();
		Connection con = null;
		serializing sh = new serializing();
		try {
			// Frequently used English words; no DB required for this step.
			GetCommonWords();
			// host -> label table from Urls.txt.
			ReadDomainsAndTheirLabels();
			Class.forName("com.mysql.jdbc.Driver").newInstance();
			con = DriverManager.getConnection(
					"jdbc:mysql://mysql.cis.ksu.edu:3306/sudhikrr", "sudhikrr",
					"40Vlf0bK");
			Statement s = con.createStatement();
			System.out
					.println("--------------------------------------------Database connection established");
			s.executeQuery("SHOW DATABASES");
			s.executeUpdate("USE sudhikrr");
			System.out
					.println("---------------------------USING credit rank's database");
			s.executeQuery("select id, url,content from urls");
			ResultSet rs = s.getResultSet();

			while (rs.next()) {

				String ID = rs.getString("id");
				String url = rs.getString("url");
				String Label = GetLabel(url);
				// The page source is stored as a BLOB; decode it to a String.
				Blob aBlob = rs.getBlob("content");
				byte[] data = aBlob.getBytes(1, (int) aBlob.length());
				String Content = new String(data);
				HeuristicDeterminer(url, Label, Content, sh, ID);

			}

			PrepareArffValues(sh);

		} catch (Exception e) {
			setStatus("Exception In BeginProcessing() " + e.getMessage());
		} finally {
			// BUGFIX: close the connection even when an exception aborted the
			// loop; previously it leaked on any failure.
			try {
				if (con != null) {
					con.close();
				}
			} catch (SQLException ignored) {
				// nothing sensible to do if close itself fails
			}
		}
	}

	/**
	 * Writes one ARFF data row per processed URL id held in sh.arffvalues.
	 * Each row contains the heuristic values (a value of -1.0 is emitted as
	 * the ARFF missing marker "?") followed by the class label; an
	 * "undecided" label is likewise written as "?". Also records every
	 * written URL id in TrainedUrls so predictions can be associated later.
	 */
	private void PrepareArffValues(serializing sh) {

		Enumeration e = sh.arffvalues.keys();
		String urlid = "";
		try {
			ArffFileWriter = new FileWriter("HeuristicsTrain.arff", true);
			ArffPrintWriter = new PrintWriter(ArffFileWriter);
			PrepareTrainHeaderPart();

			while (e.hasMoreElements()) {
				urlid = (String) e.nextElement();
				TrainedUrls.add(urlid);
				ArrayList<Double> HeuristicValues = (ArrayList) sh.arffvalues.get(urlid);
				System.out.println("Processed Url is : " + urlid);
				System.out.println("HeuristicValues : " + HeuristicValues.size());
				for (int index = 0; index < HeuristicValues.size(); index++) {
					if (index == 8) {
						// Ninth (last) attribute: append the class label and
						// terminate the row.
						String Label = sh.urllabels.get(urlid).toString();
						// BUGFIX: was `Label.toLowerCase() == "undecided"`,
						// a reference-identity comparison that never matched,
						// so undecided rows kept their literal label instead
						// of the "?" missing-class marker.
						if (Label.equalsIgnoreCase("undecided"))
							Label = "?";
						if (HeuristicValues.get(index) != -1.0) {
							ArffPrintWriter.write(HeuristicValues.get(index) + "," + Label);
						} else
							ArffPrintWriter.write("?," + Label);

						ArffPrintWriter.write("\n");
					} else {
						if (HeuristicValues.get(index) != -1.0)
							ArffPrintWriter.write(HeuristicValues.get(index) + ",");
						else
							ArffPrintWriter.write("?,");
					}

				}
			}
			// BUGFIX: close the PrintWriter FIRST so its buffer is flushed to
			// the FileWriter before the FileWriter itself is closed; the
			// original order could drop the tail of the file.
			ArffPrintWriter.close();
			ArffFileWriter.close();

		} catch (Exception e1) {
			System.out.println("Exception in PrepareArffValues() "
					+ e1.getMessage());
		}

	}

	/**
	 * Writes the ARFF header: the relation name, nine numeric heuristic
	 * attributes, the {normal,spam} class attribute, and the @data marker.
	 */
	void PrepareTrainHeaderPart() {

		if (ArffPrintWriter == null)
			ArffPrintWriter = new PrintWriter(ArffFileWriter);
		ArffPrintWriter.println("@relation ClassData");
		ArffPrintWriter.println(" ");

		// Attributes 1..9 match the nine heuristic values per row.
		for (int index = 1; index < 10; index++) {
			ArffPrintWriter.println("@attribute " + index + " numeric");
		}
		ArffPrintWriter.println("@attribute Class {normal,spam}");
		ArffPrintWriter.println(" ");
		ArffPrintWriter.println("@data");

	}

	/**
	 * Looks up the label for a URL by its host in DomainLabels.
	 *
	 * @param Url full URL string
	 * @return the stored label, or "" when the host is unknown or the URL is
	 *         malformed
	 */
	private String GetLabel(String Url) {
		String Label = "";
		try {

			URL CrawlUrl = new URL(Url);
			String Host = CrawlUrl.getHost();

			if (DomainLabels.containsKey(Host)) {
				Label = DomainLabels.get(Host);
			}
		} catch (MalformedURLException e) {
			setStatus("Exception In GetLabel() " + e.getMessage());
		}

		return Label;
	}

	/**
	 * Populates DomainLabels from Urls.txt. Each non-empty line is expected
	 * to hold "url label" separated by a space; a missing "http://" prefix
	 * is added before the host is extracted.
	 */
	private void ReadDomainsAndTheirLabels() {
		try {
			URL CrawlUrl = null;
			String Host = "";
			BufferedReader stpw = new BufferedReader(new FileReader("Urls.txt"));
			String str1 = stpw.readLine();
			while (str1 != null) {
				if (str1.length() > 0) {
					String Url = "";
					String[] UrlNames = str1.split(" ");
					if (UrlNames.length > 1) {
						// Normalize to a full URL so URL.getHost() works.
						if (!UrlNames[0].contains("http://")) {
							Url = "http://" + UrlNames[0];
						} else {
							Url = UrlNames[0];
						}
						CrawlUrl = new URL(Url);
						Host = CrawlUrl.getHost();
						// Store the host together with its label.
						DomainLabels.put(Host, UrlNames[1]);
					}
				}

				str1 = stpw.readLine();
			}

			System.out.println("Total number of Domains found are: "
					+ DomainLabels.size());
			stpw.close();

		} catch (Exception e) {
			System.out.println("Exception in ReadUrls() " + e.getMessage());
		}
	}

	/**
	 * Runs the full heuristic pipeline for one page and, when all nine
	 * heuristic values were produced, stores them (and the label) under the
	 * page's DB id in the serializing holder for later ARFF output.
	 *
	 * @param url     page URL
	 * @param Label   label resolved from DomainLabels ("" when unknown)
	 * @param Content raw page source fetched from the DB
	 * @param sh      accumulator for per-URL heuristic values and labels
	 * @param UrlID   DB primary key of the page
	 */
	private void HeuristicDeterminer(String url, String Label, String Content, serializing sh, String UrlID) {

		try {
			// NOTE: character count, not a word count — see field comment.
			TotalWords = Content.length();
			GetHeuristicAttributes(Content);
			LinkExtractor(url);
			spamDetect.indexer(url, CommonWords, Label);
			spamDetect.HieuristicTwo(TitleWords);
			spamDetect.HieuristicThree();
			spamDetect.HieuristicFour(AnchorWords);
			spamDetect.HieuristicFive(TotalWords);
			spamDetect.HieuristicSix(CompressedFileSize, UncompressedFileSize);
			spamDetect.HieuristicSeven();
			spamDetect.HieuristicEight();
			spamDetect.HieuristicNine();
			// Only keep complete feature vectors (all nine heuristics).
			if (spamDetect.HeuristicValues.size() == 9) {
				sh.urllabels.put(UrlID, Label);
				sh.arffvalues.put(UrlID, spamDetect.HeuristicValues);
			}

			// Reset per-page state for the next iteration.
			TotalWords = 0;
			AnchorWords.clear();
		} catch (Exception e) {
			System.out.println("Exception in HeuristicDeterminer() "
					+ e.getMessage());

		}
	}

	/**
	 * Loads commonwords.txt into CommonWords, lower-casing each line.
	 */
	private void GetCommonWords() {
		try {
			BufferedReader stpw = new BufferedReader(new FileReader(
					"commonwords.txt"));
			String str1 = stpw.readLine();

			while (str1 != null) {
				CommonWords.add(str1.toLowerCase());
				str1 = stpw.readLine();
			}
			stpw.close();

		} catch (Exception e) {
			System.out.println("Exception in GetCommonWords() "
					+ e.getMessage());
		}
	}

	/**
	 * Fetches the page at Url, extracts every anchor tag, and collects the
	 * individual words of each anchor's visible text into AnchorWords.
	 * Anchors pointing at documents/images (.pdf, .jpg, ...) are skipped.
	 */
	private void LinkExtractor(String Url) {
		StringTokenizer st = null;
		AnchorWords = new ArrayList<String>();
		NodeFilter filter = new NodeClassFilter(LinkTag.class);
		try {
			Parser parser = new Parser(Url);
			NodeList list = parser.extractAllNodesThatMatch(filter);
			for (int k = 0; k < list.size(); k++) {
				String input = list.elementAt(k).toHtml();
				// BUGFIX: the original mixed || and && without parentheses;
				// since && binds tighter, any tag containing "<a href=\""
				// bypassed every file-type exclusion below. Group the two
				// "looks like a link" checks before applying the exclusions.
				if ((input.contains("<a href=\"") || input.contains("href"))
						&& !input.contains(".aspx")
						&& !input.contains(".pdf")
						&& !input.contains(".jpeg")
						&& !input.contains(".jpg")
						&& !input.contains(".gif")
						&& !input.contains(".png")
						&& !input.contains(".ppt")
						&& !input.contains(".xls")
						&& !input.contains(".doc")
						&& !input.contains(".docx")) {
					// Visible anchor text sits between the first '>' and the
					// next '<'.
					String str1 = input.substring(input.indexOf(">") + 1);
					String AnchorText = str1.substring(0, str1.indexOf("<"));

					if (AnchorText.length() > 0) {
						// Strip punctuation and digits, then tokenize.
						AnchorText = AnchorText.replaceAll("\\W", " ")
								.replaceAll("\\d", " ");

						st = new StringTokenizer(AnchorText);

						while (st.hasMoreTokens()) {
							AnchorText = st.nextToken();

							if (AnchorText.length() > 0) {
								AnchorWords.add(AnchorText);
							}
						}

					}
				}
			}
		} catch (ParserException e) {
			setStatus("Parser Exception In LinkExtractorTest() "
					+ e.getMessage());
		} catch (Exception ex) {
			setStatus("Exception In LinkExtractorTest() " + ex.getMessage());
		}
	}

	/**
	 * Extracts the page title words into TitleWords and measures how well the
	 * page source compresses: the content is written to source.txt, gzipped
	 * into Source.gz, and the two file sizes are recorded in
	 * UncompressedFileSize / CompressedFileSize. Both temp files are deleted.
	 */
	private void GetHeuristicAttributes(String Contents) {

		StringTokenizer st = null;
		String Title = "";
		TitleWords = new ArrayList<String>();
		try {
			if (Contents != null && Contents.length() > 0) {
				fileWriter = new FileWriter("source.txt");
				pageWriter = new PrintWriter(fileWriter);

				// Pull the <title> text, strip punctuation/digits, tokenize.
				Pattern TitleMatchPattern = Pattern
						.compile("<title>(.*?)</title>");
				Matcher TitleMatcher = TitleMatchPattern
						.matcher(Contents.toLowerCase());
				if (TitleMatcher.find()) {
					Title = TitleMatcher.group(1).replaceAll("\\W", " ")
							.replaceAll("\\d", " ");
					st = new StringTokenizer(Title);

					while (st.hasMoreTokens()) {
						Title = st.nextToken();
						if (Title.length() > 0) {
							TitleWords.add(Title);

						}
					}
				}
				pageWriter.println(Contents);
				pageWriter.close();

				// Gzip source.txt into Source.gz to measure compressibility.
				String gZipFileName = "Source.gz";
				GZIPOutputStream out = new GZIPOutputStream(
						new FileOutputStream(gZipFileName));
				String infileName = "source.txt";
				FileInputStream inputFileStream = new FileInputStream(
						infileName);
				byte[] buf = new byte[1024];
				int len;
				while ((len = inputFileStream.read(buf)) > 0) {
					out.write(buf, 0, len);
				}
				inputFileStream.close();
				out.finish();
				out.close();

				File file = new File(gZipFileName);
				if (file.exists()) {
					// BUGFIX: the gzip output is the COMPRESSED copy; the two
					// size fields were previously assigned the wrong way
					// round, inverting the compression-ratio heuristic.
					CompressedFileSize = file.length();
					file.delete();
				}

				file = new File(infileName);
				if (file.exists()) {
					UncompressedFileSize = file.length();
					file.delete();
				}

			}
		} catch (Exception ex) {
			setStatus("Exception In GetHeuristicAttributes() "
					+ ex.getMessage());

		}
	}

	/**
	 * Entry point: runs the crawler over the stored pages, then hands the
	 * list of trained URL ids to RFModelsTestTrain for model training.
	 */
	public static void main(String args[]) {

		try {
			webcrawler crawler = new webcrawler();
			crawler.BeginProcessing();
			RFModelsTestTrain RFT = new RFModelsTestTrain();
			RFT.TrainedUrls = crawler.TrainedUrls;
			RFT.main();

		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}
